file_name
stringlengths 3
137
| prefix
stringlengths 0
918k
| suffix
stringlengths 0
962k
| middle
stringlengths 0
812k
|
---|---|---|---|
mentions-list.component.ts
|
import {
Component,
ElementRef,
EventEmitter,
HostBinding,
OnInit,
TemplateRef,
ViewChild,
ViewEncapsulation
} from '@angular/core';
import {getCaretCoordinates} from './utils';
@Component({
selector: 'mentions-list',
template: `
<ng-template #defaultItemTemplate let-item="item">
{{transformItem(item)}}
</ng-template>
<ng-template #defaultHeaderTemplate let-item="item">
{{item.type | titlecase}}s:
</ng-template>
<ul #list class="dropdown-menu scrollable-menu">
<li *ngFor="let item of items; let i = index" [class.active]="activeIndex === i">
<span class="dropdown-header" *ngIf="headerTemplate && (i === 0 || item.type !== items[i-1].type)">
<ng-template [ngTemplateOutlet]="headerTemplate || defaultHeaderTemplate"
[ngTemplateOutletContext]="{item:item,index:i}"></ng-template>
</span>
<a href class="dropdown-item" (mousedown)="onItemClick($event, i, item)" >
<ng-template [ngTemplateOutlet]="itemTemplate"
[ngTemplateOutletContext]="{item:item,index:i}"></ng-template>
|
</a>
</li>
</ul>
`,
styles: [
'mentions-list {position: absolute;display: none;}', 'mentions-list.drop-up {transform: translateY(-100%);}',
'mentions-list.show {display: block;} mentions-list.no-items {display: none;}',
'mentions-list .scrollable-menu {display: block;height: auto;max-height:300px;overflow:auto;}',
'mentions-list li.active a {background: #f7f7f9;}',
'mentions-list li .dropdown-header {display: block;}'
],
encapsulation: ViewEncapsulation.None
})
export class NgMentionsListComponent implements OnInit {
public items: any[];
public itemTemplate: TemplateRef<any>;
public headerTemplate: TemplateRef<any>;
public displayTransform: (..._: string[]) => string;
public textAreaElement: HTMLTextAreaElement;
activeIndex: number = -1;
readonly itemSelected: EventEmitter<any> = new EventEmitter<any>();
get selectedItem(): any {
return this.activeIndex >= 0 && this.items[this.activeIndex] !== undefined ? this.items[this.activeIndex] : null;
}
@ViewChild('defaultItemTemplate') defaultItemTemplate: TemplateRef<any>;
@ViewChild('defaultHeaderTemplate') defaultHeaderTemplate: TemplateRef<any>;
@ViewChild('list') list: ElementRef;
@HostBinding('class.show') public show: boolean = false;
@HostBinding('class.drop-up') public dropUp: boolean = false;
@HostBinding('style.top')
get top(): string {
return this._top + this.adjustTop + 'px';
}
@HostBinding('style.left')
get left(): string {
return this._left + 'px';
}
@HostBinding('class.no-items')
get noItems(): boolean {
return !Array.isArray(this.items) || this.items.length === 0;
}
private _top: number = 0;
private _left: number = 0;
ngOnInit(): void {
if (!this.itemTemplate) {
this.itemTemplate = this.defaultItemTemplate;
}
}
onItemClick(event: MouseEvent, activeIndex: number, item: any) {
event.preventDefault();
this.activeIndex = activeIndex;
this.itemSelected.emit(item);
}
public selectFirstItem() {
this.activeIndex = 0;
this.resetScroll();
}
public selectPreviousItem() {
if (this.activeIndex > 0) {
this.activeIndex--;
}
this.scrollToActiveItem();
}
public selectNextItem() {
if (this.activeIndex < this.items.length - 1) {
this.activeIndex++;
this.scrollToActiveItem();
}
}
public selectLastItem() {
this.activeIndex = this.items.length > 0 ? this.items.length - 1 : 0;
this.scrollToActiveItem();
}
public position() {
const element = this.textAreaElement;
let coords = getCaretCoordinates(element, element.selectionStart);
this._top = coords.top + this.textAreaElement.parentElement.offsetTop;
this._left = coords.left + element.offsetLeft;
this.list.nativeElement.scrollTop = 0;
setTimeout(() => {
const rect = this.list.nativeElement.getBoundingClientRect();
if (rect.x + rect.width > window.innerWidth) {
const calcLeft = this._left - Math.abs(window.innerWidth - (rect.x + rect.width));
this._left = calcLeft > 0 ? calcLeft : 0;
}
})
}
public resetScroll() {
this.list.nativeElement.scrollTop = 0;
}
public transformItem(item: any) {
return this.displayTransform(item) || item;
}
private get adjustTop(): number {
let adjust = 0;
if (!this.dropUp) {
const computed = getComputedStyle(this.textAreaElement);
adjust = parseInt(computed.fontSize, 10) + this.textAreaElement.offsetTop;
}
return adjust;
}
private scrollToActiveItem() {
let element = this.list.nativeElement as HTMLElement;
setTimeout(() => {
if (this.activeIndex === 0) {
element.scrollTop = 0;
} else {
const activeElement = element.querySelector('li.active') as HTMLElement;
if (activeElement) {
element.scrollTop = activeElement.offsetTop;
}
}
});
}
}
| |
types.go
|
/*
Copyright 2018 The Crossplane Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
|
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package v1alpha1
import (
"strconv"
"github.com/Azure/go-autorest/autorest/to"
corev1alpha1 "github.com/crossplaneio/crossplane/pkg/apis/core/v1alpha1"
"github.com/crossplaneio/crossplane/pkg/util"
corev1 "k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)
const (
// ClusterProvisioningStateSucceeded is the state for a cluster that has succeeded provisioning
ClusterProvisioningStateSucceeded = "Succeeded"
// DefaultReclaimPolicy is the default reclaim policy to use
DefaultReclaimPolicy = corev1alpha1.ReclaimRetain
// DefaultNodeCount is the default node count for a cluster
DefaultNodeCount = 1
)
// AKSClusterSpec is the spec for AKS cluster resources
type AKSClusterSpec struct {
// ResourceGroupName is the name of the resource group that the cluster will be created in
ResourceGroupName string `json:"resourceGroupName"` //--resource-group
// Location is the Azure location that the cluster will be created in
Location string `json:"location"` //--location
// Version is the Kubernetes version that will be deployed to the cluster
Version string `json:"version"` //--kubernetes-version
// NodeCount is the number of nodes that the cluster will initially be created with. This can
// be scaled over time and defaults to 1.
// +kubebuilder:validation:Maximum=100
// +kubebuilder:validation:Minimum=0
NodeCount *int `json:"nodeCount,omitempty"` //--node-count
// NodeVMSize is the name of the worker node VM size, e.g., Standard_B2s, Standard_F2s_v2, etc.
// This value cannot be changed after cluster creation.
NodeVMSize string `json:"nodeVMSize"` //--node-vm-size
// DNSNamePrefix is the DNS name prefix to use with the hosted Kubernetes API server FQDN. You
// will use this to connect to the Kubernetes API when managing containers after creating the cluster.
DNSNamePrefix string `json:"dnsNamePrefix"` //--dns-name-prefix
// DisableRBAC determines whether RBAC will be disabled or enabled in the cluster.
DisableRBAC bool `json:"disableRBAC,omitempty"` //--disable-rbac
// Kubernetes object references
ClaimRef *corev1.ObjectReference `json:"claimRef,omitempty"`
ClassRef *corev1.ObjectReference `json:"classRef,omitempty"`
ConnectionSecretRef *corev1.LocalObjectReference `json:"connectionSecretRef,omitempty"`
ProviderRef corev1.LocalObjectReference `json:"providerRef,omitempty"`
// ReclaimPolicy identifies how to handle the cloud resource after the deletion of this type
ReclaimPolicy corev1alpha1.ReclaimPolicy `json:"reclaimPolicy,omitempty"`
}
// AKSClusterStatus is the status for AKS cluster resources
type AKSClusterStatus struct {
corev1alpha1.ConditionedStatus
corev1alpha1.BindingStatusPhase
// ClusterName is the name of the cluster as registered with the cloud provider
ClusterName string `json:"clusterName,omitempty"`
// State is the current state of the cluster
State string `json:"state,omitempty"`
// ProviderID is the external ID to identify this resource in the cloud provider
ProviderID string `json:"providerID,omitempty"`
// Endpoint is the endpoint where the cluster can be reached
Endpoint string `json:"endpoint"`
// ApplicationObjectID is the object ID of the AD application the cluster uses for Azure APIs
ApplicationObjectID string `json:"appObjectID,omitempty"`
// ServicePrincipalID is the ID of the service principal the AD application uses
ServicePrincipalID string `json:"servicePrincipalID,omitempty"`
// RunningOperation stores any current long running operation for this instance across
// reconciliation attempts. This will be a serialized Azure AKS cluster API object that will
// be used to check the status and completion of the operation during each reconciliation.
// Once the operation has completed, this field will be cleared out.
RunningOperation string `json:"runningOperation,omitempty"`
}
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
// AKSCluster is the Schema for the instances API
// +k8s:openapi-gen=true
// +groupName=compute.azure
type AKSCluster struct {
metav1.TypeMeta `json:",inline"`
metav1.ObjectMeta `json:"metadata,omitempty"`
Spec AKSClusterSpec `json:"spec,omitempty"`
Status AKSClusterStatus `json:"status,omitempty"`
}
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
// AKSClusterList contains a list of AKSCluster items
type AKSClusterList struct {
metav1.TypeMeta `json:",inline"`
metav1.ListMeta `json:"metadata,omitempty"`
Items []AKSCluster `json:"items"`
}
// NewAKSClusterSpec creates a new AKSClusterSpec based on the given properties map
func NewAKSClusterSpec(properties map[string]string) *AKSClusterSpec {
spec := &AKSClusterSpec{
ReclaimPolicy: DefaultReclaimPolicy,
NodeCount: to.IntPtr(DefaultNodeCount),
}
val, ok := properties["resourceGroupName"]
if ok {
spec.ResourceGroupName = val
}
val, ok = properties["location"]
if ok {
spec.Location = val
}
val, ok = properties["version"]
if ok {
spec.Version = val
}
val, ok = properties["nodeCount"]
if ok {
if nodeCount, err := strconv.Atoi(val); err == nil {
spec.NodeCount = to.IntPtr(nodeCount)
}
}
val, ok = properties["nodeVMSize"]
if ok {
spec.NodeVMSize = val
}
val, ok = properties["dnsNamePrefix"]
if ok {
spec.DNSNamePrefix = val
}
val, ok = properties["disableRBAC"]
if ok {
if disableRBAC, err := strconv.ParseBool(val); err == nil {
spec.DisableRBAC = disableRBAC
}
}
return spec
}
// ObjectReference to this instance
func (a *AKSCluster) ObjectReference() *corev1.ObjectReference {
return util.ObjectReference(a.ObjectMeta, util.IfEmptyString(a.APIVersion, APIVersion), util.IfEmptyString(a.Kind, AKSClusterKind))
}
// OwnerReference to use this instance as an owner
func (a *AKSCluster) OwnerReference() metav1.OwnerReference {
return *util.ObjectToOwnerReference(a.ObjectReference())
}
// ConnectionSecret returns a secret object for this resource
func (a *AKSCluster) ConnectionSecret() *corev1.Secret {
return &corev1.Secret{
ObjectMeta: metav1.ObjectMeta{
Namespace: a.Namespace,
Name: a.ConnectionSecretName(),
OwnerReferences: []metav1.OwnerReference{a.OwnerReference()},
},
}
}
// ConnectionSecretName returns a secret name from the reference
func (a *AKSCluster) ConnectionSecretName() string {
if a.Spec.ConnectionSecretRef == nil {
a.Spec.ConnectionSecretRef = &corev1.LocalObjectReference{
Name: a.Name,
}
} else if a.Spec.ConnectionSecretRef.Name == "" {
a.Spec.ConnectionSecretRef.Name = a.Name
}
return a.Spec.ConnectionSecretRef.Name
}
// Endpoint returns the current endpoint of this resource
func (a *AKSCluster) Endpoint() string {
return a.Status.Endpoint
}
// State returns instance state value saved in the status (could be empty)
func (a *AKSCluster) State() string {
return a.Status.State
}
// IsAvailable for usage/binding
func (a *AKSCluster) IsAvailable() bool {
return a.State() == ClusterProvisioningStateSucceeded
}
// IsBound returns if the resource is currently bound
func (a *AKSCluster) IsBound() bool {
return a.Status.Phase == corev1alpha1.BindingStateBound
}
// SetBound sets the binding state of this resource
func (a *AKSCluster) SetBound(state bool) {
if state {
a.Status.Phase = corev1alpha1.BindingStateBound
} else {
a.Status.Phase = corev1alpha1.BindingStateUnbound
}
}
| |
pg_022.py
|
# Create a function named more_than_n that has three parameters named lst, item, and n.
# The function should return True if item appears in the list more than n times. The function should return False otherwise.
def
|
(lst, item, n):
if lst.count(item) > n:
return True
else:
return False
|
more_than_n
|
random_id.py
|
"""Where the magic happens."""
import random
from animalid import alloys, animals, colors, fabrics, opinions, origins, shapes, sizes
FIRST_ADJECTIVES = opinions + shapes + sizes
SECOND_ADJECTIVES = alloys + colors + fabrics + origins
def
|
():
"""What it's all about."""
return "_".join(
[
random.choice(FIRST_ADJECTIVES),
random.choice(SECOND_ADJECTIVES),
random.choice(animals),
]
)
|
generate_animal_id
|
gl_context.rs
|
// Copyright (C) 2018 Víctor Jáquez <[email protected]>
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use glib::translate::*;
use glib::IsA;
use gst_gl_sys;
use libc::uintptr_t;
use GLContext;
use GLDisplay;
use GLPlatform;
use GLAPI;
impl GLContext {
pub unsafe fn ne
|
: IsA<GLDisplay>>(
display: &T,
handle: uintptr_t,
context_type: GLPlatform,
available_apis: GLAPI,
) -> Option<GLContext> {
from_glib_full(gst_gl_sys::gst_gl_context_new_wrapped(
display.as_ref().to_glib_none().0,
handle,
context_type.to_glib(),
available_apis.to_glib(),
))
}
pub fn get_current_gl_context(context_type: GLPlatform) -> uintptr_t {
unsafe {
gst_gl_sys::gst_gl_context_get_current_gl_context(context_type.to_glib()) as uintptr_t
}
}
pub fn get_proc_address_with_platform(
context_type: GLPlatform,
gl_api: GLAPI,
name: &str,
) -> uintptr_t {
unsafe {
gst_gl_sys::gst_gl_context_get_proc_address_with_platform(
context_type.to_glib(),
gl_api.to_glib(),
name.to_glib_none().0,
) as uintptr_t
}
}
}
pub trait GLContextExtManual: 'static {
fn get_gl_context(&self) -> uintptr_t;
fn get_proc_address(&self, name: &str) -> uintptr_t;
}
impl<O: IsA<GLContext>> GLContextExtManual for O {
fn get_gl_context(&self) -> uintptr_t {
unsafe {
gst_gl_sys::gst_gl_context_get_gl_context(self.as_ref().to_glib_none().0) as uintptr_t
}
}
fn get_proc_address(&self, name: &str) -> uintptr_t {
unsafe {
gst_gl_sys::gst_gl_context_get_proc_address(
self.as_ref().to_glib_none().0,
name.to_glib_none().0,
) as uintptr_t
}
}
}
|
w_wrapped<T
|
tree.ts
|
const axios = require('axios').default;
function clearValidTrees()
{
let tree = document.querySelector('#page-tree .cms_tree');
if (null !== tree) {
tree.classList.remove('valid-target-tree');
}
let treeNodeItems = document.querySelectorAll('#page-tree .cms_tree-node-item');
treeNodeItems.forEach((treeNodeItem) => {
treeNodeItem.classList.remove('valid-target');
});
}
function
|
()
{
let treeSaveButton = document.querySelector('.btn-tree-save');
// @ts-ignore
let pageTree = $('#page-tree > .cms_tree').sortable({
group: 'serialization',
containerSelector: '.cms_tree',
nested: true,
itemSelector: '.cms_tree-node',
placeholder: '<div class="placeholder"><i class="fas fa-arrow-right"></i></div>',
isValidTarget: function ($item: any, container: any) {
let containerElement = <HTMLElement>container.el[0];
return containerElement.classList.contains('valid-target-tree');
},
onCancel: ($item: any, container: any, _super: any) => {
// Clear valid trees.
clearValidTrees();
},
onDrop: ($item: any, container: any, _super: any) => {
// Clear valid trees.
clearValidTrees();
// Highlight tree save button.
if (null !== treeSaveButton) {
treeSaveButton.classList.remove('btn-dark');
treeSaveButton.classList.add('btn-success');
}
},
onDragStart: ($item: any, container: any, _super: any) => {
let element = <HTMLElement>$item[0];
let elementName = element.dataset.elementName;
// Sections are not draggable.
if ('Section' === elementName) {
return false;
}
// Highlight tree save button.
if (null !== treeSaveButton) {
treeSaveButton.classList.remove('btn-dark');
treeSaveButton.classList.add('btn-success');
}
// Look at every tree and see If this item is allowed.
let trees = document.querySelectorAll('#page-tree .cms_tree');
trees.forEach((tree: HTMLElement) => {
let isChild = $.contains(element, tree); // True if this is a child of the items being dragged.
let acceptedTypes = 'children' in tree.dataset ? tree.dataset.children : '';
if (isChild === false && ('all' === acceptedTypes || acceptedTypes.split(',').indexOf(elementName) !== -1)) {
tree.classList.add('valid-target-tree');
let dropTargets = tree.parentElement.querySelectorAll('.cms_tree-node-item');
dropTargets.forEach((dropTarget) => {
dropTarget.classList.add('valid-target');
});
} else {
tree.classList.remove('valid-target-tree');
let dropTargets = tree.parentElement.querySelectorAll('.cms_tree-node-item');
dropTargets.forEach((dropTarget) => {
dropTarget.classList.remove('valid-target');
});
}
});
},
afterMove: () => {
// Highlight tree save button.
if (null !== treeSaveButton) {
treeSaveButton.classList.remove('btn-dark');
treeSaveButton.classList.add('btn-success');
}
}
});
// Save page tree on button click.
if (null !== treeSaveButton) {
treeSaveButton.addEventListener('click', (event) => {
event.preventDefault();
let pageUuidInput = <HTMLInputElement>document.getElementById('tree-pageUuid');
let pageUuid = pageUuidInput.value;
let onVersionInput = <HTMLInputElement>document.getElementById('tree-onVersion');
let onVersion = onVersionInput.value;
let data = pageTree.sortable('serialize').get();
// Submit page tree sort data.
let pageTreeSaveUrl = `/admin/page/save-order/${pageUuid}/${onVersion}?ajax=1`;
axios.post(pageTreeSaveUrl, data)
.then(function (response: any) {
// handle success.
})
.catch(function (error: any) {
// handle error
})
.finally(function () {
// always executed
window.location.reload();
});
});
}
// Highlight elements on page when hovering them in page tree.
let dropTargets = document.querySelectorAll('.cms_tree-node-item');
dropTargets.forEach((dropTarget: HTMLElement) => {
let uuid = dropTarget.parentElement.dataset.uuid;
let elementSelector = `[data-uuid="${uuid}"]`;
let pageFrame = <HTMLIFrameElement>document.getElementById('page-frame');
let hoveredElement = pageFrame.contentDocument.querySelector(elementSelector);
dropTarget.addEventListener('mouseenter', () => {
// Highlight element on page.
hoveredElement.classList.add('editor-highlight');
});
dropTarget.addEventListener('mouseleave', () => {
// Un-highlight element on page.
hoveredElement.classList.remove('editor-highlight');
});
});
}
let updateTree = function(pageUuid: string, userId: any)
{
let pageTreeUrl = '/admin/api/page-tree/' + pageUuid + '/' + userId;
axios.get(pageTreeUrl)
.then(function (response: any) {
// handle success
// Replace pageTree content.
let pageTree = document.getElementById('page-tree');
if (null !== pageTree) {
pageTree.innerHTML = response.data;
}
bindTree();
})
.catch(function (error: any) {
// handle error
console.log(error);
})
.finally(function () {
// always executed
});
};
export default updateTree;
|
bindTree
|
alms.py
|
import numpy as np
import healpy as hp
try:
from pixell import curvedsky, enmap
except:
pass
try: # PySM >= 3.2.1
import pysm3.units as u
import pysm3 as pysm
except ImportError:
import pysm.units as u
import pysm
class PrecomputedAlms(object):
def __init__(
self,
filename,
input_units="uK_CMB",
input_reference_frequency=None,
nside=None,
target_shape=None,
target_wcs=None,
from_cl=False,
from_cl_seed=None,
precompute_output_map=True,
has_polarization=True,
map_dist=None,
):
"""Generic component based on Precomputed Alms
Load a set of Alms from a FITS file and generate maps at the requested
resolution and frequency assuming the CMB black body spectrum.
A single set of Alms is used for all frequencies requested by PySM,
consider that PySM expects the output of components to be in uK_RJ.
See more details at https://so-pysm-models.readthedocs.io/en/latest/so_pysm_models/models.html
Also note that the Alms are clipped to 3*nside-1 to avoid
artifacts from high-ell components which cannot be properly represented
by a low-nside map.
Parameters
----------
filename : string
Path to the input Alms in FITS format
input_units : string
Input unit strings as defined by pysm.convert_units, e.g. K_CMB, uK_RJ, MJysr
input_reference_frequency: float
If input units are K_RJ or Jysr, the reference frequency
nside : int
HEALPix NSIDE of the output maps
from_cl : bool
If True, the input file contains C_ell instead of a_lm,
they should provided with the healpy old ordering TT, TE, TB, EE, EB, BB, sorry.
from_cl_seed : int
Seed set just before synalm to simulate the alms from the C_ell,
necessary to set it in order to get the same input map for different runs
only used if `from_cl` is True
precompute_output_map : bool
If True (default), Alms are transformed into a map in the constructor,
if False, the object only stores the Alms and generate the map at each
call of the signal method, this is useful to generate maps convolved
with different beams
has_polarization : bool
whether or not to simulate also polarization maps
Default: True
"""
self.nside = nside
self.shape = target_shape
self.wcs = target_wcs
self.filename = filename
self.input_units = u.Unit(input_units)
self.has_polarization = has_polarization
if from_cl:
np.random.seed(from_cl_seed)
cl = hp.read_cl(self.filename)
if not self.has_polarization and cl.ndim > 1:
cl = cl[0]
# using healpy old ordering TT, TE, TB, EE, EB, BB
alm = hp.synalm(cl, new=False, verbose=False)
else:
alm = np.complex128(
hp.read_alm(
self.filename, hdu=(1, 2, 3) if self.has_polarization else 1
)
)
self.equivalencies = (
None
if input_reference_frequency is None
else u.cmb_equivalencies(input_reference_frequency)
)
if precompute_output_map:
self.output_map = self.compute_output_map(alm)
else:
self.alm = alm
def compute_output_map(self, alm):
lmax = hp.Alm.getlmax(alm.shape[-1]) # we assume mmax = lmax
if self.nside is None:
assert (self.shape is not None) and (self.wcs is not None)
n_comp = 3 if self.has_polarization else 1
output_map = enmap.empty((n_comp,) + self.shape[-2:], self.wcs)
curvedsky.alm2map(alm, output_map, spin=[0, 2], verbose=True)
elif self.nside is not None:
if lmax > 3*self.nside-1:
clip = np.ones(3*self.nside)
if alm.ndim == 1:
alm_clipped = hp.almxfl(alm, clip)
else:
alm_clipped = [hp.almxfl(each, clip) for each in alm]
else:
alm_clipped = alm
output_map = hp.alm2map(alm_clipped, self.nside)
else:
raise ValueError("You must specify either nside or both of shape and wcs")
return (output_map << self.input_units).to(
u.uK_CMB, equivalencies=self.equivalencies
)
@u.quantity_input
def get_emission(
self,
freqs: u.GHz,
fwhm: [u.arcmin, None] = None,
weights=None,
output_units=u.uK_RJ,
):
|
"""Return map in uK_RJ at given frequency or array of frequencies
Parameters
----------
freqs : list or ndarray
Frequency or frequencies in GHz at which compute the signal
fwhm : float (optional)
Smooth the input alms before computing the signal, this can only be used
if the class was initialized with `precompute_output_map` to False.
output_units : str
Output units, as defined in `pysm.convert_units`, by default this is
"uK_RJ" as expected by PySM.
Returns
-------
output_maps : ndarray
Output maps array with the shape (num_freqs, 1 or 3 (I or IQU), npix)
"""
freqs = pysm.utils.check_freq_input(freqs)
weights = pysm.utils.normalize_weights(freqs, weights)
try:
output_map = self.output_map
except AttributeError:
if fwhm is None:
alm = self.alm
else:
alm = hp.smoothalm(
self.alm, fwhm=fwhm.to_value(u.radian), pol=True, inplace=False
)
output_map = self.compute_output_map(alm)
output_units = u.Unit(output_units)
assert output_units in [u.uK_RJ, u.uK_CMB]
if output_units == u.uK_RJ:
convert_to_uK_RJ = (
np.ones(len(freqs), dtype=np.double) * u.uK_CMB
).to_value(u.uK_RJ, equivalencies=u.cmb_equivalencies(freqs * u.GHz))
if len(freqs) == 1:
scaling_factor = convert_to_uK_RJ[0]
else:
scaling_factor = np.trapz(convert_to_uK_RJ * weights, x=freqs)
return output_map.value * scaling_factor << u.uK_RJ
elif output_units == output_map.unit:
return output_map
|
|
v2beta2ObjectMetricSource.d.ts
|
/**
* Kubernetes
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
*
* The version of the OpenAPI document: v1.13.9
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
import { V2beta2CrossVersionObjectReference } from './v2beta2CrossVersionObjectReference';
import { V2beta2MetricIdentifier } from './v2beta2MetricIdentifier';
import { V2beta2MetricTarget } from './v2beta2MetricTarget';
/**
* ObjectMetricSource indicates how to scale on a metric describing a kubernetes object (for example, hits-per-second on an Ingress object).
*/
export declare class
|
{
'describedObject': V2beta2CrossVersionObjectReference;
'metric': V2beta2MetricIdentifier;
'target': V2beta2MetricTarget;
static discriminator: string | undefined;
static attributeTypeMap: Array<{
name: string;
baseName: string;
type: string;
}>;
static getAttributeTypeMap(): {
name: string;
baseName: string;
type: string;
}[];
}
|
V2beta2ObjectMetricSource
|
state_service_test.go
|
package escrow
import (
"bytes"
"crypto/ecdsa"
"encoding/hex"
"errors"
"github.com/ethereum/go-ethereum/accounts/abi"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/crypto"
"github.com/singnet/snet-daemon/blockchain"
"github.com/stretchr/testify/assert"
"math/big"
"testing"
)
type stateServiceTestType struct {
service PaymentChannelStateService
senderAddress common.Address
signerPrivateKey *ecdsa.PrivateKey
signerAddress common.Address
channelServiceMock *paymentChannelServiceMock
paymentStorage *PaymentStorage
ethereumBlock *big.Int
defaultChannelId *big.Int
defaultChannelKey *PaymentChannelKey
defaultChannelData *PaymentChannelData
defaultRequest *ChannelStateRequest
defaultReply *ChannelStateReply
}
var stateServiceTest = func() stateServiceTestType {
channelServiceMock := &paymentChannelServiceMock{}
senderAddress := crypto.PubkeyToAddress(GenerateTestPrivateKey().PublicKey)
signerPrivateKey := GenerateTestPrivateKey()
signerAddress := crypto.PubkeyToAddress(signerPrivateKey.PublicKey)
channelServiceMock.blockchainReader = &BlockchainChannelReader{}
ethereumBlock := big.NewInt(53)
defaultChannelId := big.NewInt(42)
channelServiceMock.blockchainReader.readChannelFromBlockchain = func(channelID *big.Int) (*blockchain.MultiPartyEscrowChannel, bool, error) {
mpeChannel := &blockchain.MultiPartyEscrowChannel{
Recipient: senderAddress,
Nonce: big.NewInt(3),
}
return mpeChannel, true, nil
}
channelServiceMock.blockchainReader.recipientPaymentAddress = func() common.Address {
return senderAddress
}
defaultSignature, err := hex.DecodeString("0504030201")
if err != nil {
panic("Could not make defaultSignature")
}
paymentStorage := NewPaymentStorage(NewMemStorage())
verificationAddress := common.HexToAddress("0xf25186b5081ff5ce73482ad761db0eb0d25abfbf")
return stateServiceTestType{
service: PaymentChannelStateService{
channelService: channelServiceMock,
paymentStorage: paymentStorage,
mpeAddress: func() common.Address { return verificationAddress },
compareWithLatestBlockNumber: func(*big.Int) error { return nil },
},
senderAddress: senderAddress,
signerPrivateKey: signerPrivateKey,
signerAddress: signerAddress,
channelServiceMock: channelServiceMock,
ethereumBlock: ethereumBlock,
defaultChannelId: defaultChannelId,
defaultChannelKey: &PaymentChannelKey{ID: defaultChannelId},
defaultChannelData: &PaymentChannelData{
ChannelID: defaultChannelId,
Sender: senderAddress,
Signer: signerAddress,
Signature: defaultSignature,
Nonce: big.NewInt(3),
AuthorizedAmount: big.NewInt(12345),
},
defaultRequest: &ChannelStateRequest{
CurrentBlock: ethereumBlock.Uint64(),
ChannelId: bigIntToBytes(defaultChannelId),
Signature: getSignature(
getChannelStateRequestMessage(verificationAddress, defaultChannelId, ethereumBlock),
signerPrivateKey,
),
},
defaultReply: &ChannelStateReply{
CurrentNonce: bigIntToBytes(big.NewInt(3)),
CurrentSignedAmount: bigIntToBytes(big.NewInt(12345)),
CurrentSignature: defaultSignature,
},
}
}()
func getChannelStateRequestMessage(mpeAddress common.Address, channelId *big.Int, currentBlock *big.Int) []byte {
message := bytes.Join([][]byte{
[]byte("__get_channel_state"),
mpeAddress.Bytes(),
bigIntToBytes(channelId),
abi.U256(currentBlock),
}, nil)
return message
}
func cleanup() {
stateServiceTest.channelServiceMock.blockchainReader.readChannelFromBlockchain = func(channelID *big.Int) (*blockchain.MultiPartyEscrowChannel, bool, error) {
mpeChannel := &blockchain.MultiPartyEscrowChannel{
Recipient: stateServiceTest.senderAddress,
Nonce: big.NewInt(3),
}
return mpeChannel, true, nil
}
paymentStorage := NewPaymentStorage(NewMemStorage())
stateServiceTest.service.paymentStorage = paymentStorage
stateServiceTest.channelServiceMock.Clear()
}
func TestGetChannelState(t *testing.T) {
stateServiceTest.channelServiceMock.Put(
stateServiceTest.defaultChannelKey,
stateServiceTest.defaultChannelData,
)
defer stateServiceTest.channelServiceMock.Clear()
reply, err := stateServiceTest.service.GetChannelState(
nil,
stateServiceTest.defaultRequest,
)
assert.Nil(t, err)
assert.Equal(t, stateServiceTest.defaultReply, reply)
}
func TestGetChannelStateWhenNonceDiffers(t *testing.T) {
previousSignature, _ := hex.DecodeString("0708090A0B")
previousChannelData := &PaymentChannelData{
ChannelID: stateServiceTest.defaultChannelId,
Sender: stateServiceTest.senderAddress,
Signer: stateServiceTest.signerAddress,
Signature: previousSignature,
Nonce: big.NewInt(2),
AuthorizedAmount: big.NewInt(123),
}
stateServiceTest.channelServiceMock.Put(
stateServiceTest.defaultChannelKey,
stateServiceTest.defaultChannelData,
)
payment := getPaymentFromChannel(previousChannelData)
stateServiceTest.service.paymentStorage.Put(payment)
stateServiceTest.channelServiceMock.blockchainReader.readChannelFromBlockchain = func(channelID *big.Int) (*blockchain.MultiPartyEscrowChannel, bool, error) {
mpeChannel := &blockchain.MultiPartyEscrowChannel{
Recipient: stateServiceTest.senderAddress,
Nonce: big.NewInt(2),
}
return mpeChannel, true, nil
}
defer cleanup()
reply, err := stateServiceTest.service.GetChannelState(
nil,
stateServiceTest.defaultRequest,
)
assert.Nil(t, err)
assert.Equal(t, bigIntToBytes(big.NewInt(3)), reply.CurrentNonce)
assert.Equal(t, stateServiceTest.defaultChannelData.Signature, reply.CurrentSignature)
assert.Equal(t, bigIntToBytes(big.NewInt(12345)), reply.CurrentSignedAmount)
assert.Equal(t, bigIntToBytes(big.NewInt(123)), reply.OldNonceSignedAmount)
assert.Equal(t, previousChannelData.Signature, reply.OldNonceSignature)
}
func TestGetChannelStateChannelIdIncorrectSignature(t *testing.T) {
reply, err := stateServiceTest.service.GetChannelState(
nil,
&ChannelStateRequest{
ChannelId: bigIntToBytes(stateServiceTest.defaultChannelId),
Signature: []byte{0x00},
},
)
assert.Equal(t, errors.New("incorrect signature"), err)
assert.Nil(t, reply)
}
func TestGetChannelStateChannelStorageError(t *testing.T) {
stateServiceTest.channelServiceMock.SetError(errors.New("storage error"))
defer stateServiceTest.channelServiceMock.Clear()
reply, err := stateServiceTest.service.GetChannelState(nil, stateServiceTest.defaultRequest)
assert.Equal(t, errors.New("channel error:storage error"), err)
assert.Nil(t, reply)
}
func TestGetChannelStateChannelNotFound(t *testing.T) {
channelId := big.NewInt(42)
stateServiceTest.channelServiceMock.Clear()
reply, err := stateServiceTest.service.GetChannelState(
nil,
&ChannelStateRequest{
ChannelId: bigIntToBytes(channelId),
Signature: getSignature(bigIntToBytes(channelId), stateServiceTest.signerPrivateKey),
},
)
assert.Equal(t, errors.New("channel is not found, channelId: 42"), err)
assert.Nil(t, reply)
}
func TestGetChannelStateIncorrectSender(t *testing.T) {
stateServiceTest.channelServiceMock.Put(
stateServiceTest.defaultChannelKey,
stateServiceTest.defaultChannelData,
)
defer stateServiceTest.channelServiceMock.Clear()
reply, err := stateServiceTest.service.GetChannelState(
nil,
&ChannelStateRequest{
ChannelId: bigIntToBytes(stateServiceTest.defaultChannelId),
Signature: getSignature(
bigIntToBytes(stateServiceTest.defaultChannelId),
GenerateTestPrivateKey()),
},
)
assert.Equal(t, errors.New("only channel signer can get latest channel state"), err)
assert.Nil(t, reply)
}
func TestGetChannelStateNoOperationsOnThisChannelYet(t *testing.T) {
channelData := stateServiceTest.defaultChannelData
channelData.AuthorizedAmount = nil
channelData.Signature = nil
stateServiceTest.channelServiceMock.Put(
stateServiceTest.defaultChannelKey,
channelData,
)
defer stateServiceTest.channelServiceMock.Clear()
reply, err := stateServiceTest.service.GetChannelState(
nil,
stateServiceTest.defaultRequest,
)
assert.Nil(t, err)
expectedReply := stateServiceTest.defaultReply
expectedReply.CurrentSignedAmount = nil
expectedReply.CurrentSignature = nil
expectedReply.OldNonceSignature = nil
expectedReply.OldNonceSignedAmount = nil
assert.Equal(t, expectedReply, reply)
}
func TestGetChannelStateBlockchainError(t *testing.T) {
stateServiceTest.channelServiceMock.Put(
stateServiceTest.defaultChannelKey,
stateServiceTest.defaultChannelData,
)
stateServiceTest.channelServiceMock.blockchainReader.readChannelFromBlockchain =
func(channelID *big.Int) (*blockchain.MultiPartyEscrowChannel, bool, error) {
return nil, false, errors.New("Test error from blockchain reads")
}
defer cleanup()
reply, err := stateServiceTest.service.GetChannelState(
nil,
stateServiceTest.defaultRequest,
)
assert.Nil(t, reply)
assert.Equal(t, errors.New("channel error:Test error from blockchain reads"), err)
}
func
|
(t *testing.T) {
stateServiceTest.channelServiceMock.Put(
stateServiceTest.defaultChannelKey,
stateServiceTest.defaultChannelData,
)
stateServiceTest.channelServiceMock.blockchainReader.readChannelFromBlockchain =
func(channelID *big.Int) (*blockchain.MultiPartyEscrowChannel, bool, error) {
return nil, false, nil
}
defer cleanup()
reply, err := stateServiceTest.service.GetChannelState(
nil,
stateServiceTest.defaultRequest,
)
assert.Nil(t, reply)
assert.Equal(t, errors.New("unable to read channel details from blockchain."), err)
}
func TestGetChannelStateNonceIncrementedInBlockchainNoOldPayment(t *testing.T) {
stateServiceTest.channelServiceMock.Put(
stateServiceTest.defaultChannelKey,
stateServiceTest.defaultChannelData,
)
blockchainChannelData := &blockchain.MultiPartyEscrowChannel{
Recipient: stateServiceTest.senderAddress,
Nonce: big.NewInt(0).Sub(stateServiceTest.defaultChannelData.Nonce, big.NewInt(1)),
}
stateServiceTest.channelServiceMock.blockchainReader.readChannelFromBlockchain =
func(channelID *big.Int) (*blockchain.MultiPartyEscrowChannel, bool, error) {
return blockchainChannelData, true, nil
}
defer cleanup()
reply, err := stateServiceTest.service.GetChannelState(
nil,
stateServiceTest.defaultRequest,
)
assert.Nil(t, reply)
assert.Equal(t, errors.New("channel has different nonce in local storage and blockchain and old payment is not found in storage"), err)
}
// Claim tests are already added to escrow_test.go
|
TestGetChannelStateNoChannelInBlockchain
|
clock.ts
|
/**
* Copyright 2018, OpenCensus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* The Clock class is used to record the duration and endTime for spans.
*/
export class
|
{
/** Indicates if the clock is endend. */
private endedLocal = false;
/** Indicates the clock's start time. */
private startTimeLocal: Date;
/** The time in high resolution in a [seconds, nanoseconds]. */
private hrtimeLocal: [number, number];
/** The duration between start and end of the clock. */
private diff: [number, number] = [0, 0];
/** Constructs a new SamplerImpl instance. */
constructor() {
this.startTimeLocal = new Date();
this.hrtimeLocal = process.hrtime();
}
/** Ends the clock. */
end(): void {
if (this.endedLocal) {
return;
}
this.diff = process.hrtime(this.hrtimeLocal);
this.endedLocal = true;
}
/** Gets the duration of the clock. */
get duration(): number {
if (!this.endedLocal) {
return 0;
}
const ns = this.diff[0] * 1e9 + this.diff[1];
return ns / 1e6;
}
/** Starts the clock. */
get startTime(): Date {
return this.startTimeLocal;
}
/**
* Gets the time so far.
* @returns A Date object with the current duration.
*/
get endTime(): Date {
if (this.ended) {
return new Date(this.startTime.getTime() + this.duration);
}
return new Date();
}
/** Indicates if the clock was ended. */
get ended(): boolean {
return this.endedLocal;
}
}
|
Clock
|
services.ts
|
/*
* MIT License
*
* Copyright (c) 2017-2019 Stefano Cappa
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
import { ExampleService } from './example.service';
import { GithubService } from './github.service';
import { AuthService } from './auth.service';
import { AuthGuard } from './auth-guard.service';
export { ExampleService } from './example.service';
export { GithubService } from './github.service';
export { AuthService } from './auth.service';
export { AuthGuard } from './auth-guard.service';
/**
* Array of core services
*/
export const CORE_SERVICES: any[] = [ExampleService, GithubService, AuthService, AuthGuard];
| |
std.go
|
// Copyright 2013 The Go Circuit Project
// Use of this source code is governed by the license for
// The Go Circuit Project, found in the LICENSE file.
//
// Authors:
// 2013 Petar Maymounkov <[email protected]>
package main
import (
"io"
"os"
"github.com/pkg/errors"
"github.com/urfave/cli"
)
func stdin(x *cli.Context) (err error) {
defer func() {
if r := recover(); r != nil {
err = errors.Wrapf(r.(error), "error, likely due to missing server or misspelled anchor: %v", r)
}
}()
c := dial(x)
args := x.Args()
if args.Len() != 1 {
return errors.New("stdin needs one anchor argument")
}
w, _ := parseGlob(args.Get(0))
u, ok := c.Walk(w).Get().(interface {
Stdin() io.WriteCloser
})
if !ok {
return errors.New("not a process or a container")
}
q := u.Stdin()
if _, err = io.Copy(q, os.Stdin); err != nil {
return errors.Wrapf(err, "transmission error: %v", err)
}
if err = q.Close(); err != nil {
return errors.Wrapf(err, "error closing stdin: %v", err)
}
return
}
func stdout(x *cli.Context) (err error) {
defer func() {
if r := recover(); r != nil {
err = errors.Wrapf(r.(error), "error, likely due to missing server or misspelled anchor: %v", r)
}
}()
c := dial(x)
args := x.Args()
if args.Len() != 1 {
return errors.New("stdout needs one anchor argument")
}
w, _ := parseGlob(args.Get(0))
u, ok := c.Walk(w).Get().(interface {
Stdout() io.ReadCloser
})
if !ok {
return errors.New("not a process or a container")
}
io.Copy(os.Stdout, u.Stdout())
return
}
func stderr(x *cli.Context) (err error) {
defer func() {
if r := recover(); r != nil {
err = errors.Wrapf(r.(error), "error, likely due to missing server or misspelled anchor: %v", r)
}
}()
c := dial(x)
args := x.Args()
if args.Len() != 1
|
w, _ := parseGlob(args.Get(0))
u, ok := c.Walk(w).Get().(interface {
Stderr() io.ReadCloser
})
if !ok {
return errors.New("not a process or a container")
}
io.Copy(os.Stdout, u.Stderr())
// if _, err := io.Copy(os.Stdout, u.Stderr()); err != nil {
// fatalf("transmission error: %v", err)
// }
return
}
|
{
return errors.New("stderr needs one anchor argument")
}
|
binary-search.js
|
// Binary search in js
function
|
(arr, x, start, end) {
// Base Condition
if (start > end) return "Element not found";
// Find the middle index
let mid = Math.floor((start + end) / 2);
// Compare mid with given key x
if (arr[mid] === x) return `Found ${arr[mid]} at index ${mid}`;
// If element at mid is greater than x,
// search in the left half of mid
if (arr[mid] > x) return recursiveBinarySearch(arr, x, start, mid - 1);
// If element at mid is smaller than x,
// search in the right half of mid
else return recursiveBinarySearch(arr, x, mid + 1, end);
}
const arr = [3, 4, 5, 2, 6];
const x = 3;
console.log(recursiveBinarySearch(arr, x, 0, arr.length - 1));
|
recursiveBinarySearch
|
file_test.go
|
package fs
import (
"io/ioutil"
"os"
"testing"
)
const lockTestPath = "test.lock"
var lockTestMode = os.FileMode(0666)
func testLockFile(fs FileSystem, t *testing.T) {
fs.Remove(lockTestPath)
lock, needRecovery, err := fs.CreateLockFile(lockTestPath, lockTestMode)
if lock == nil || needRecovery || err != nil {
t.Fatal(lock, err, needRecovery)
}
lock2, needRecovery2, err2 := fs.CreateLockFile(lockTestPath, lockTestMode)
if lock2 != nil || needRecovery2 || err2 != os.ErrExist {
t.Fatal(lock2, needRecovery2, err2)
}
if err := lock.Unlock(); err != nil {
t.Fatal(err)
}
if _, err := fs.Stat(lockTestPath); err == nil {
t.Fatal()
}
}
func testLockFileNeedsRecovery(fs FileSystem, t *testing.T) {
ioutil.WriteFile(lockTestPath, []byte{}, lockTestMode)
lock, needRecovery, err := fs.CreateLockFile(lockTestPath, lockTestMode)
if lock == nil || !needRecovery || err != nil {
t.Fatal(lock, err, needRecovery)
}
|
t.Fatal(err)
}
if _, err := fs.Stat(lockTestPath); err == nil {
t.Fatal()
}
}
|
if err := lock.Unlock(); err != nil {
|
waterflow.js
|
//ConfigurableFirmData
var five = require("johnny-five");
var board = new five.Board({
port: "COM5"
});
var Stream = require('stream');
var flow_stream = new Stream();
var plotly = require('plotly')('workshop','v6w5xlbx9j');
var data = [{
x : [],
y : [],
stream : {
token : '25tm9197rz',
maxpoints : 5000
}
}];
var layout = { fileopt : "overwrite", filename : "Water Flow" };
var pulses = 0;
var lastFlowRateTimer = 0;
board.on("ready", function() {
this.pinMode(14, five.Pin.INPUT);
lastFlowPinState = 0;
// Check Digital Pin to see if theres a change
var x = this.digitalRead(14, function(value) {
// send the pin status to flowSignal helper
flowSignal(value);
//console.log(value);
});
// Set how often to Emit data to Plotly
setInterval(function() {
var litres = pulses;
litres /= 7.5;
litres /= 60;
var data = {x:getDateString(), y:litres};
flow_stream.emit('data', JSON.stringify(data)+'\n');
console.log("estos son los listros",lastFlowRateTimer)
}, 1000);
// Set up Graph + Initialize stream + Pipe to stream
plotly.plot(data,layout,function (err, msg) {
if (err) console.log(err);
console.log(msg);
var stream = plotly.stream('25tm9197rz', function (err, res) {
if (err) console.log(err);
console.log(res);
});
flow_stream.pipe(stream);
});
});
// helper function to keep track of pulses
function flowSignal (value) {
if (value === 0) {
lastFlowRateTimer ++;
return;
}
if (value === 1) {
pulses ++;
}
lastFlowPinState = value;
flowrate = 1000.0;
flowrate /= lastFlowRateTimer;
lastFlowRateTimer = 0;
}
// little helper function to get a nicely formatted date string
function getDateString () {
|
// for your timezone just multiply +/-GMT by 3600000
var datestr = new Date(time - 14400000).toISOString().replace(/T/, ' ').replace(/Z/, '');
return datestr;
}
|
var time = new Date();
// 14400000 is (GMT-4 Montreal)
|
APIFunctions.js
|
import { getBonApiToken, getFoodRepoToken } from "./APIKeys.js";
import { Alert } from 'react-native';
async function httpGet(theUrl, token) {
// let xmlHttp = new XMLHttpRequest();
// xmlHttp.responseType = "json";
// xmlHttp.open("GET", theUrl); //opens get req
// xmlHttp.setRequestHeader("Authorization", token); //adds token
// xmlHttp.send(null); //send
// console.log(xmlHttp.response)
// return xmlHttp.response; //returns response raw
// let xmlHttp = new XMLHttpRequest();
// let test = "";
// xmlHttp.onreadystatechange = (e) => {
// if (xmlHttp.readyState !== 4) {
// return;
// }
// if (xmlHttp.status === 200) {
// console.log(xmlHttp.response);
// test = xmlHttp.response;
// } else {
// Alert.alert('error');
// }
// };
// xmlHttp.open("GET", theUrl); //opens get req
// xmlHttp.setRequestHeader("Authorization", token); //adds token
// xmlHttp.send();
// console.log(test);
// return xmlHttp.response;
const response = await fetch(theUrl, {method:'GET', headers:{Authorization: token}});
const json = await response.json();
return json;
}
function formatToken(token) {
return "Token " + token; //tokens for both work this way.
}
//FOOD REPO
function getFoodRepoURL(ids) {
return (
"https://www.foodrepo.org/api/v3/products?excludes=images%2Cnutrients&barcodes=" +
ids
); //ids are joined by a comma
}
function
|
(ids, token) {
let key = formatToken(token);
let url = getFoodRepoURL(ids); //%2C is a comma i guess?
return httpGet(url, key);
}
function foodRepoParseName(request) {
let i;
let names = [];
for (i = 0; i < request["data"].length; i++) {
names = names.push(request["data"][i]["name_translations"]["en"]); //take english names only
}
return names;
}
//BONAPI
function getBonApiURL(foods, conditions, diet) {
let joiner = "";
let conditions_f = "";
let diet_f = "";
//empty if both condition and diet are false
if (conditions) {
joiner = "/?";
conditions_f = "allergies=" + conditions;
if (diet) {
diet_f = "&diet=" + diet; //ampersand if both are present
}
} else {
if (diet) {
joiner = "/?";
diet_f = "diet=" + diet;
}
}
return (
"https://bon-api.com/api/v1/ingredient/alternatives/" +
foods +
joiner +
conditions_f +
diet_f
);
}
function bonApiRequest(foods, allergies, diet, token) {
let key = formatToken(token);
let url = getBonApiURL(foods.join(","), allergies.join(","), diet);
return httpGet(url, key);
}
function bonAPIParseIngredients(request) {
return [
request["response"]["matched_ingredients"],
request["response"]["updated_ingredients"],
];
}
export function main(barcodes, allergies, diet){
let token = getFoodRepoToken;
foodRepoRequest(barcodes,token).then(test => {
let foodnames = foodRepoParseName(JSON.parse(test));
let foods = foodnames.join(',').split(' ').join('_').split(',');
if (foods[0]){
token = getBonApiToken;
let ingredients = bonApiRequest(foods,allergies,diet,token);
if (ingredients.includes("BonAPI | Error 500")){
return[-1, "Recipe not found"];
} else {
let lmfao = bonAPIParseIngredients(ingredients).then(parsed => {
return [foods, parsed[0], parsed[1]];
});
return lmfao;
}
} else {
return [-1, "Barcode not found"];
}
});
}
//////////////////////EVERYTHING BELOW HERE IS JUST TO RUN MAIN.
//fetching names from ids (in app only use one at a time)
var ids = ["0099482434366", "0646670317873", "0184706000042"];
//fetching replacements given params
var foodnames = ["eggs", "bread", "milk"];
var diet_enum = ["meateater", "pescetarian", "vegetarian", "vegan"];
var allergies_enum = [
"mollusc_allergy",
"mustard_allergy",
"sesame_allergy",
"gluten_allergy",
"lactose_intolerance",
"soy_allergy",
"egg_allergy",
"fish_allergy",
"celergy_allergy",
"crustacean_allergy",
"peanut_allergy",
"tree_nut_allergy",
"wheat_allergy",
"lupin_allergy",
"milk_allergy",
];
var diet = diet_enum[1];
var allergies = ["gluten_allergy", "egg_allergy", "mustard_allergy"];
|
foodRepoRequest
|
pml-transaction.component.ts
|
import { Component, OnInit } from '@angular/core';
import { PmlTrasaction } from '../../_models';
import { Router } from '@angular/router';
import { ApiService } from '../../_services';
@Component({
selector: 'app-pml-transaction',
templateUrl: './pml-transaction.component.html',
styleUrls: ['./pml-transaction.component.scss']
})
export class PmlTransactionComponent implements OnInit {
response: any;
success = false;
message = '';
pmltransactions: Array<PmlTrasaction>;
constructor( private router: Router, private apiService: ApiService) { }
ngOnInit() {
if (!window.localStorage.getItem('token')) {
this.router.navigate(['login']);
return;
}
const storedRecords = window.localStorage.getItem('pmltransaction');
const updated = window.localStorage.getItem('pmltransaction_updated');
if (storedRecords && updated) {
this.pmltransactions = JSON.parse(storedRecords);
this.success = true;
this.message = `Records retrieved since ${updated}`;
} else {
this.pmltransactionRetrieve();
}
}
pmltransactionRetrieve(): void {
this.apiService.retrievePmlTransaction().subscribe(data => {
this.response = data;
this.pmltransactions = this.response.payload;
this.success = this.response.success;
this.message = this.response.message;
if (this.response.success) {
window.localStorage.setItem('pmltransaction', JSON.stringify(this.response.payload));
window.localStorage.setItem('pmltransaction_updated', JSON.stringify(new Date()));
}
});
}
|
window.localStorage.removeItem('pmltransactionDetailId');
window.localStorage.setItem('pmltransactionDetailId', pmltransaction.id);
this.router.navigate(['pml-transaction/detail'])
.then(nav => { console.log(nav); }, err => {console.log(err); });
console.log('Navigating to pmltransaction detail');
return;
}
pmltransactionDelete(pmltransaction: PmlTrasaction): void {
this.apiService.deletePmlTransaction(pmltransaction.id).subscribe( data => {
this.pmltransactions = this.pmltransactions.filter(i => i.id !== pmltransaction.id);
window.localStorage.setItem('pmltransaction', JSON.stringify(this.pmltransactions));
});
}
pmltransactionEdit(pmltransaction: PmlTrasaction): void {
window.localStorage.removeItem('pmltransactionEditId');
window.localStorage.setItem('pmltransactionEditId', pmltransaction.id);
this.router.navigate(['pml-transaction/edit']);
}
pmltransactionAdd(): void {
this.router.navigate(['pml-transaction/add']);
}
}
|
pmltransactionDetail(pmltransaction: PmlTrasaction): void {
|
nodes.go
|
package clusterconfig
import (
"context"
"fmt"
"strings"
"k8s.io/apimachinery/pkg/runtime"
corev1 "k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
_ "k8s.io/apimachinery/pkg/runtime/serializer/yaml"
"k8s.io/client-go/kubernetes"
corev1client "k8s.io/client-go/kubernetes/typed/core/v1"
"github.com/openshift/insights-operator/pkg/record"
)
// GatherNodes collects all Nodes.
//
// The Kubernetes api https://github.com/kubernetes/client-go/blob/master/kubernetes/typed/core/v1/node.go#L78
// Response see https://docs.openshift.com/container-platform/4.3/rest_api/index.html#nodelist-v1core
//
// Location in archive: config/node/
func GatherNodes(g *Gatherer) func() ([]record.Record, []error) {
return func() ([]record.Record, []error) {
gatherKubeClient, err := kubernetes.NewForConfig(g.gatherProtoKubeConfig)
if err != nil {
return nil, []error{err}
}
return gatherNodes(g.ctx, gatherKubeClient.CoreV1())
}
}
func gatherNodes(ctx context.Context, coreClient corev1client.CoreV1Interface) ([]record.Record, []error) {
nodes, err := coreClient.Nodes().List(ctx, metav1.ListOptions{})
if err != nil {
return nil, []error{err}
}
records := make([]record.Record, 0, len(nodes.Items))
for i, node := range nodes.Items {
records = append(records, record.Record{Name: fmt.Sprintf("config/node/%s", node.Name), Item: NodeAnonymizer{&nodes.Items[i]}})
}
return records, nil
}
// NodeAnonymizer implements serialization of Node with anonymization
type NodeAnonymizer struct{ *corev1.Node }
// Marshal implements serialization of Node with anonymization
func (a NodeAnonymizer) Marshal(_ context.Context) ([]byte, error) {
return runtime.Encode(kubeSerializer, anonymizeNode(a.Node))
}
// GetExtension returns extension for anonymized node objects
func (a NodeAnonymizer) GetExtension() string {
return "json"
}
func anonymizeNode(node *corev1.Node) *corev1.Node {
for k := range node.Annotations {
if isProductNamespacedKey(k) {
continue
}
node.Annotations[k] = ""
}
for k, v := range node.Labels {
if isProductNamespacedKey(k)
|
node.Labels[k] = anonymizeString(v)
}
for i := range node.Status.Addresses {
node.Status.Addresses[i].Address = anonymizeURL(node.Status.Addresses[i].Address)
}
node.Status.NodeInfo.BootID = anonymizeString(node.Status.NodeInfo.BootID)
node.Status.NodeInfo.SystemUUID = anonymizeString(node.Status.NodeInfo.SystemUUID)
node.Status.NodeInfo.MachineID = anonymizeString(node.Status.NodeInfo.MachineID)
node.Status.Images = nil
return node
}
func isProductNamespacedKey(key string) bool {
return strings.Contains(key, "openshift.io/") || strings.Contains(key, "k8s.io/") || strings.Contains(key, "kubernetes.io/")
}
|
{
continue
}
|
api_op_GetRateBasedStatementManagedKeys.go
|
// Code generated by smithy-go-codegen DO NOT EDIT.
package wafv2
import (
"context"
awsmiddleware "github.com/aws/aws-sdk-go-v2/aws/middleware"
"github.com/aws/aws-sdk-go-v2/aws/signer/v4"
"github.com/aws/aws-sdk-go-v2/service/wafv2/types"
smithy "github.com/awslabs/smithy-go"
"github.com/awslabs/smithy-go/middleware"
smithyhttp "github.com/awslabs/smithy-go/transport/http"
)
// This is the latest version of AWS WAF, named AWS WAFV2, released in November,
// 2019. For information, including how to migrate your AWS WAF resources from the
// prior release, see the AWS WAF Developer Guide
// (https://docs.aws.amazon.com/waf/latest/developerguide/waf-chapter.html).
// Retrieves the keys that are currently blocked by a rate-based rule. The maximum
// number of managed keys that can be blocked for a single rate-based rule is
// 10,000. If more than 10,000 addresses exceed the rate limit, those with the
// highest rates are blocked.
func (c *Client) GetRateBasedStatementManagedKeys(ctx context.Context, params *GetRateBasedStatementManagedKeysInput, optFns ...func(*Options)) (*GetRateBasedStatementManagedKeysOutput, error) {
stack := middleware.NewStack("GetRateBasedStatementManagedKeys", smithyhttp.NewStackRequest)
options := c.options.Copy()
for _, fn := range optFns {
fn(&options)
}
addawsAwsjson11_serdeOpGetRateBasedStatementManagedKeysMiddlewares(stack)
awsmiddleware.AddRequestInvocationIDMiddleware(stack)
smithyhttp.AddContentLengthMiddleware(stack)
addResolveEndpointMiddleware(stack, options)
v4.AddComputePayloadSHA256Middleware(stack)
addRetryMiddlewares(stack, options)
addHTTPSignerV4Middleware(stack, options)
awsmiddleware.AddAttemptClockSkewMiddleware(stack)
addClientUserAgent(stack)
smithyhttp.AddErrorCloseResponseBodyMiddleware(stack)
smithyhttp.AddCloseResponseBodyMiddleware(stack)
addOpGetRateBasedStatementManagedKeysValidationMiddleware(stack)
stack.Initialize.Add(newServiceMetadataMiddleware_opGetRateBasedStatementManagedKeys(options.Region), middleware.Before)
addRequestIDRetrieverMiddleware(stack)
addResponseErrorMiddleware(stack)
for _, fn := range options.APIOptions {
if err := fn(stack); err != nil
|
}
handler := middleware.DecorateHandler(smithyhttp.NewClientHandler(options.HTTPClient), stack)
result, metadata, err := handler.Handle(ctx, params)
if err != nil {
return nil, &smithy.OperationError{
ServiceID: ServiceID,
OperationName: "GetRateBasedStatementManagedKeys",
Err: err,
}
}
out := result.(*GetRateBasedStatementManagedKeysOutput)
out.ResultMetadata = metadata
return out, nil
}
type GetRateBasedStatementManagedKeysInput struct {
// The name of the rate-based rule to get the keys for.
//
// This member is required.
RuleName *string
// Specifies whether this is for an AWS CloudFront distribution or for a regional
// application. A regional application can be an Application Load Balancer (ALB) or
// an API Gateway stage. To work with CloudFront, you must also specify the Region
// US East (N. Virginia) as follows:
//
// * CLI - Specify the Region when you use
// the CloudFront scope: --scope=CLOUDFRONT --region=us-east-1.
//
// * API and SDKs
// - For all calls, use the Region endpoint us-east-1.
//
// This member is required.
Scope types.Scope
// The unique identifier for the Web ACL. This ID is returned in the responses to
// create and list commands. You provide it to operations like update and delete.
//
// This member is required.
WebACLId *string
// The name of the Web ACL. You cannot change the name of a Web ACL after you
// create it.
//
// This member is required.
WebACLName *string
}
type GetRateBasedStatementManagedKeysOutput struct {
// The keys that are of Internet Protocol version 4 (IPv4).
ManagedKeysIPV4 *types.RateBasedStatementManagedKeysIPSet
// The keys that are of Internet Protocol version 6 (IPv6).
ManagedKeysIPV6 *types.RateBasedStatementManagedKeysIPSet
// Metadata pertaining to the operation's result.
ResultMetadata middleware.Metadata
}
func addawsAwsjson11_serdeOpGetRateBasedStatementManagedKeysMiddlewares(stack *middleware.Stack) {
stack.Serialize.Add(&awsAwsjson11_serializeOpGetRateBasedStatementManagedKeys{}, middleware.After)
stack.Deserialize.Add(&awsAwsjson11_deserializeOpGetRateBasedStatementManagedKeys{}, middleware.After)
}
func newServiceMetadataMiddleware_opGetRateBasedStatementManagedKeys(region string) awsmiddleware.RegisterServiceMetadata {
return awsmiddleware.RegisterServiceMetadata{
Region: region,
ServiceID: ServiceID,
SigningName: "wafv2",
OperationName: "GetRateBasedStatementManagedKeys",
}
}
|
{
return nil, err
}
|
labels.go
|
// Copyright © 2021 Alibaba Group Holding Ltd.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package plugin
import (
"fmt"
"strings"
"github.com/sealerio/sealer/logger"
"github.com/sealerio/sealer/pkg/client/k8s"
strUtils "github.com/sealerio/sealer/utils/strings"
v1 "k8s.io/api/core/v1"
)
type LabelsNodes struct {
data map[string][]label
client *k8s.Client
}
type label struct {
key string
value string
}
func NewLabelsPlugin() Interface {
return &LabelsNodes{data: map[string][]label{}}
}
func init() {
Register(LabelPlugin, NewLabelsPlugin())
}
func (l LabelsNodes) Run(context Context, phase Phase) error {
if phase != PhasePreGuest || context.Plugin.Spec.Type != LabelPlugin {
logger.Warn("current phase is %s, label need set action to `PreGuest` !")
return nil
}
c, err := k8s.Newk8sClient()
if err != nil {
return err
}
l.client = c
l.data = l.formatData(context.Plugin.Spec.Data, context.Host)
nodeList, err := l.client.ListNodes()
if err != nil {
return fmt.Errorf("current cluster nodes not found, %v", err)
}
for _, v := range nodeList.Items {
internalIP := l.getAddress(v.Status.Addresses)
labels, ok := l.data[internalIP]
if ok {
|
}
return nil
}
func (l LabelsNodes) formatData(data string, hosts []string) map[string][]label {
m := make(map[string][]label)
items := strings.Split(data, "\n")
if len(items) == 0 {
logger.Debug("label data is empty!")
return m
}
for _, v := range items {
tmps := strings.Split(strings.TrimSpace(v), " ")
if len(tmps) != 2 {
//logger.Warn("label data is no-compliance with the rules! label data: %v", v)
continue
}
ip := tmps[0]
if strUtils.NotIn(ip, hosts) {
continue
}
labelStr := strings.Split(tmps[1], ",")
var labels []label
for _, l := range labelStr {
tmp := strings.Split(l, "=")
if len(tmp) != 2 {
logger.Warn("label data is no-compliance with the rules! label data: %v", l)
continue
}
labels = append(labels, label{
key: tmp[0],
value: tmp[1],
})
}
m[ip] = labels
}
return m
}
func (l LabelsNodes) getAddress(addresses []v1.NodeAddress) string {
for _, v := range addresses {
if strings.EqualFold(string(v.Type), "InternalIP") {
return v.Address
}
}
return ""
}
|
m := v.GetLabels()
for _, val := range labels {
m[val.key] = val.value
}
v.SetLabels(m)
v.SetResourceVersion("")
if _, err := l.client.UpdateNode(v); err != nil {
return fmt.Errorf("current cluster nodes label failed, %v", err)
}
logger.Info("successfully added node %s labels %v.", internalIP, labels)
}
|
exec.js
|
import MockRedis from 'ioredis';
describe('exec', () => {
it('should resolve Promise.all after all operations is done', () => {
const redis = new MockRedis({
data: {
user_next: '1',
post_next: '1',
},
});
|
return redis
.multi([
['incr', 'user_next'],
['incr', 'post_next'],
])
.exec()
.then((results) =>
expect(results).toEqual([
[null, 2],
[null, 2],
])
);
});
it('should support a callback function', (done) => {
const redis = new MockRedis({
data: {
user_next: '1',
post_next: '1',
},
});
redis
.multi([
['incr', 'user_next'],
['incr', 'post_next'],
])
.exec((err, results) => {
expect(results).toEqual([
[null, 2],
[null, 2],
]);
done();
});
});
});
| |
worker.go
|
package gorush
import (
"context"
"errors"
"sync"
)
// InitWorkers for initialize all workers.
func InitWorkers(ctx context.Context, wg *sync.WaitGroup, workerNum int64, queueNum int64) {
LogAccess.Info("worker number is ", workerNum, ", queue number is ", queueNum)
QueueNotification = make(chan PushNotification, queueNum)
for i := int64(0); i < workerNum; i++ {
go startWorker(ctx, wg, i)
}
}
// SendNotification is send message to iOS or Android
func SendNotification(req PushNotification)
|
func startWorker(ctx context.Context, wg *sync.WaitGroup, num int64) {
defer wg.Done()
for notification := range QueueNotification {
SendNotification(notification)
}
LogAccess.Info("closed the worker num ", num)
}
// markFailedNotification adds failure logs for all tokens in push notification
func markFailedNotification(notification *PushNotification, reason string) {
LogError.Error(reason)
for _, token := range notification.Tokens {
notification.AddLog(getLogPushEntry(FailedPush, token, *notification, errors.New(reason)))
}
notification.WaitDone()
}
// queueNotification add notification to queue list.
func queueNotification(ctx context.Context, req RequestPush) (int, []LogPushEntry) {
var count int
wg := sync.WaitGroup{}
newNotification := []*PushNotification{}
for i := range req.Notifications {
notification := &req.Notifications[i]
switch notification.Platform {
case PlatFormIos:
if !PushConf.Ios.Enabled {
continue
}
case PlatFormAndroid:
if !PushConf.Android.Enabled {
continue
}
}
newNotification = append(newNotification, notification)
}
log := make([]LogPushEntry, 0, count)
for _, notification := range newNotification {
notification.ctx = ctx
if PushConf.Core.Sync {
notification.wg = &wg
notification.log = &log
notification.AddWaitCount()
}
if !tryEnqueue(*notification, QueueNotification) {
markFailedNotification(notification, "max capacity reached")
}
count += len(notification.Tokens)
// Count topic message
if notification.To != "" {
count++
}
}
if PushConf.Core.Sync {
wg.Wait()
}
StatStorage.AddTotalCount(int64(count))
return count, log
}
// tryEnqueue tries to enqueue a job to the given job channel. Returns true if
// the operation was successful, and false if enqueuing would not have been
// possible without blocking. Job is not enqueued in the latter case.
func tryEnqueue(job PushNotification, jobChan chan<- PushNotification) bool {
select {
case jobChan <- job:
return true
default:
return false
}
}
|
{
if PushConf.Core.Sync {
defer req.WaitDone()
}
select {
//case <-req.ctx.Done():
default:
switch req.Platform {
case PlatFormIos:
PushToIOS(req)
case PlatFormAndroid:
PushToAndroid(req)
}
}
}
|
gateio.py
|
# -*- coding: utf-8 -*-
# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:
# https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code
from ccxt.async_support.base.exchange import Exchange
# -----------------------------------------------------------------------------
try:
basestring # Python 3
except NameError:
basestring = str # Python 2
import hashlib
import math
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import ArgumentsRequired
from ccxt.base.errors import InsufficientFunds
from ccxt.base.errors import InvalidAddress
from ccxt.base.errors import OrderNotFound
from ccxt.base.errors import NotSupported
from ccxt.base.errors import DDoSProtection
class gateio (Exchange):
def describe(self):
return self.deep_extend(super(gateio, self).describe(), {
'id': 'gateio',
'name': 'Gate.io',
'countries': ['CN'],
'version': '2',
'rateLimit': 1000,
'has': {
'CORS': False,
'createMarketOrder': False,
'fetchTickers': True,
'withdraw': True,
'createDepositAddress': True,
'fetchDepositAddress': True,
'fetchClosedOrders': True,
'fetchOpenOrders': True,
'fetchOrderTrades': True,
'fetchOrders': True,
'fetchOrder': True,
'fetchMyTrades': True,
},
'urls': {
'logo': 'https://user-images.githubusercontent.com/1294454/31784029-0313c702-b509-11e7-9ccc-bc0da6a0e435.jpg',
'api': {
'public': 'https://data.gate.io/api',
'private': 'https://data.gate.io/api',
},
'www': 'https://gate.io/',
'doc': 'https://gate.io/api2',
'fees': [
'https://gate.io/fee',
'https://support.gate.io/hc/en-us/articles/115003577673',
],
},
'api': {
'public': {
'get': [
'pairs',
'marketinfo',
'marketlist',
'tickers',
'ticker/{id}',
'orderBook/{id}',
'trade/{id}',
'tradeHistory/{id}',
'tradeHistory/{id}/{tid}',
],
},
'private': {
'post': [
'balances',
'depositAddress',
'newAddress',
'depositsWithdrawals',
'buy',
'sell',
'cancelOrder',
'cancelAllOrders',
'getOrder',
'openOrders',
'tradeHistory',
'withdraw',
],
},
},
'fees': {
'trading': {
'tierBased': True,
'percentage': True,
'maker': 0.002,
'taker': 0.002,
},
},
'exceptions': {
'4': DDoSProtection,
'7': NotSupported,
'8': NotSupported,
'9': NotSupported,
'15': DDoSProtection,
'16': OrderNotFound,
'17': OrderNotFound,
'21': InsufficientFunds,
},
# https://gate.io/api2#errCode
'errorCodeNames': {
'1': 'Invalid request',
'2': 'Invalid version',
'3': 'Invalid request',
'4': 'Too many attempts',
'5': 'Invalid sign',
'6': 'Invalid sign',
'7': 'Currency is not supported',
'8': 'Currency is not supported',
'9': 'Currency is not supported',
'10': 'Verified failed',
'11': 'Obtaining address failed',
'12': 'Empty params',
'13': 'Internal error, please report to administrator',
'14': 'Invalid user',
'15': 'Cancel order too fast, please wait 1 min and try again',
'16': 'Invalid order id or order is already closed',
'17': 'Invalid orderid',
'18': 'Invalid amount',
'19': 'Not permitted or trade is disabled',
'20': 'Your order size is too small',
'21': 'You don\'t have enough fund',
},
'options': {
'limits': {
'cost': {
'min': {
'BTC': 0.0001,
'ETH': 0.001,
'USDT': 1,
},
},
},
},
})
async def fetch_markets(self, params={}):
response = await self.publicGetMarketinfo()
markets = self.safe_value(response, 'pairs')
if not markets:
raise ExchangeError(self.id + ' fetchMarkets got an unrecognized response')
result = []
for i in range(0, len(markets)):
market = markets[i]
keys = list(market.keys())
id = keys[0]
details = market[id]
baseId, quoteId = id.split('_')
base = baseId.upper()
quote = quoteId.upper()
base = self.common_currency_code(base)
quote = self.common_currency_code(quote)
symbol = base + '/' + quote
precision = {
'amount': 8,
'price': details['decimal_places'],
}
amountLimits = {
'min': details['min_amount'],
'max': None,
}
priceLimits = {
'min': math.pow(10, -details['decimal_places']),
'max': None,
}
defaultCost = amountLimits['min'] * priceLimits['min']
minCost = self.safe_float(self.options['limits']['cost']['min'], quote, defaultCost)
costLimits = {
'min': minCost,
'max': None,
}
limits = {
'amount': amountLimits,
'price': priceLimits,
'cost': costLimits,
}
active = True
result.append({
'id': id,
'symbol': symbol,
'base': base,
'quote': quote,
'baseId': baseId,
'quoteId': quoteId,
'info': market,
'active': active,
'maker': details['fee'] / 100,
'taker': details['fee'] / 100,
'precision': precision,
'limits': limits,
})
return result
async def fetch_balance(self, params={}):
await self.load_markets()
balance = await self.privatePostBalances()
result = {'info': balance}
currencies = list(self.currencies.keys())
for i in range(0, len(currencies)):
currency = currencies[i]
code = self.common_currency_code(currency)
account = self.account()
if 'available' in balance:
if currency in balance['available']:
account['free'] = float(balance['available'][currency])
if 'locked' in balance:
if currency in balance['locked']:
account['used'] = float(balance['locked'][currency])
account['total'] = self.sum(account['free'], account['used'])
result[code] = account
return self.parse_balance(result)
async def fetch_order_book(self, symbol, limit=None, params={}):
await self.load_markets()
orderbook = await self.publicGetOrderBookId(self.extend({
'id': self.market_id(symbol),
}, params))
return self.parse_order_book(orderbook)
def parse_ticker(self, ticker, market=None):
timestamp = self.milliseconds()
symbol = None
if market:
symbol = market['symbol']
last = self.safe_float(ticker, 'last')
percentage = self.safe_float(ticker, 'percentChange')
open = None
change = None
average = None
if (last is not None) and(percentage is not None):
relativeChange = percentage / 100
open = last / self.sum(1, relativeChange)
change = last - open
average = self.sum(last, open) / 2
return {
'symbol': symbol,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'high': self.safe_float(ticker, 'high24hr'),
'low': self.safe_float(ticker, 'low24hr'),
'bid': self.safe_float(ticker, 'highestBid'),
'bidVolume': None,
'ask': self.safe_float(ticker, 'lowestAsk'),
'askVolume': None,
'vwap': None,
'open': open,
'close': last,
'last': last,
'previousClose': None,
'change': change,
'percentage': percentage,
'average': average,
'baseVolume': self.safe_float(ticker, 'quoteVolume'),
'quoteVolume': self.safe_float(ticker, 'baseVolume'),
'info': ticker,
}
def handle_errors(self, code, reason, url, method, headers, body, response):
if len(body) <= 0:
return
if body[0] != '{':
return
resultString = self.safe_string(response, 'result', '')
if resultString != 'false':
return
errorCode = self.safe_string(response, 'code')
if errorCode is not None:
exceptions = self.exceptions
errorCodeNames = self.errorCodeNames
if errorCode in exceptions:
message = ''
if errorCode in errorCodeNames:
message = errorCodeNames[errorCode]
else:
message = self.safe_string(response, 'message', '(unknown)')
raise exceptions[errorCode](message)
async def fetch_tickers(self, symbols=None, params={}):
await self.load_markets()
tickers = await self.publicGetTickers(params)
result = {}
ids = list(tickers.keys())
for i in range(0, len(ids)):
id = ids[i]
baseId, quoteId = id.split('_')
base = baseId.upper()
quote = quoteId.upper()
base = self.common_currency_code(base)
quote = self.common_currency_code(quote)
symbol = base + '/' + quote
ticker = tickers[id]
market = None
if symbol in self.markets:
market = self.markets[symbol]
if id in self.markets_by_id:
market = self.markets_by_id[id]
result[symbol] = self.parse_ticker(ticker, market)
return result
async def fetch_ticker(self, symbol, params={}):
await self.load_markets()
market = self.market(symbol)
ticker = await self.publicGetTickerId(self.extend({
'id': market['id'],
}, params))
return self.parse_ticker(ticker, market)
def parse_trade(self, trade, market):
# public fetchTrades
timestamp = self.safe_integer(trade, 'timestamp')
# private fetchMyTrades
timestamp = self.safe_integer(trade, 'time_unix', timestamp)
if timestamp is not None:
timestamp *= 1000
id = self.safe_string(trade, 'tradeID')
id = self.safe_string(trade, 'id', id)
# take either of orderid or orderId
orderId = self.safe_string(trade, 'orderid')
orderId = self.safe_string(trade, 'orderNumber', orderId)
price = self.safe_float(trade, 'rate')
amount = self.safe_float(trade, 'amount')
cost = None
if price is not None:
if amount is not None:
cost = price * amount
return {
'id': id,
'info': trade,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'symbol': market['symbol'],
'order': orderId,
'type': None,
'side': trade['type'],
'price': price,
'amount': amount,
'cost': cost,
'fee': None,
}
async def fetch_trades(self, symbol, since=None, limit=None, params={}):
await self.load_markets()
market = self.market(symbol)
response = await self.publicGetTradeHistoryId(self.extend({
'id': market['id'],
}, params))
return self.parse_trades(response['data'], market, since, limit)
async def fetch_orders(self, symbol=None, since=None, limit=None, params={}):
response = await self.privatePostOpenOrders(params)
return self.parse_orders(response['orders'], None, since, limit)
async def fetch_order(self, id, symbol=None, params={}):
await self.load_markets()
response = await self.privatePostGetOrder(self.extend({
'orderNumber': id,
'currencyPair': self.market_id(symbol),
}, params))
return self.parse_order(response['order'])
def parse_order_status(self, status):
statuses = {
'cancelled': 'canceled',
# 'closed': 'closed', # these two statuses aren't actually needed
# 'open': 'open', # as they are mapped one-to-one
}
if status in statuses:
return statuses[status]
return status
def parse_order(self, order, market=None):
#
# {'amount': '0.00000000',
# 'currencyPair': 'xlm_usdt',
# 'fee': '0.0113766632239302 USDT',
# 'feeCurrency': 'USDT',
# 'feePercentage': 0.18,
# 'feeValue': '0.0113766632239302',
# 'filledAmount': '30.14004987',
# 'filledRate': 0.2097,
# 'initialAmount': '30.14004987',
# 'initialRate': '0.2097',
# 'left': 0,
# 'orderNumber': '998307286',
# 'rate': '0.2097',
# 'status': 'closed',
# 'timestamp': 1531158583,
# 'type': 'sell'},
#
id = self.safe_string(order, 'orderNumber')
symbol = None
marketId = self.safe_string(order, 'currencyPair')
if marketId in self.markets_by_id:
market = self.markets_by_id[marketId]
if market is not None:
symbol = market['symbol']
timestamp = self.safe_integer(order, 'timestamp')
if timestamp is not None:
timestamp *= 1000
status = self.parse_order_status(self.safe_string(order, 'status'))
side = self.safe_string(order, 'type')
price = self.safe_float(order, 'filledRate')
amount = self.safe_float(order, 'initialAmount')
filled = self.safe_float(order, 'filledAmount')
remaining = self.safe_float(order, 'leftAmount')
if remaining is None:
# In the order status response, self field has a different name.
remaining = self.safe_float(order, 'left')
feeCost = self.safe_float(order, 'feeValue')
feeCurrency = self.safe_string(order, 'feeCurrency')
feeRate = self.safe_float(order, 'feePercentage')
if feeRate is not None:
feeRate = feeRate / 100
if feeCurrency is not None:
if feeCurrency in self.currencies_by_id:
feeCurrency = self.currencies_by_id[feeCurrency]['code']
return {
'id': id,
'datetime': self.iso8601(timestamp),
'timestamp': timestamp,
'status': status,
'symbol': symbol,
'type': 'limit',
'side': side,
'price': price,
'cost': None,
'amount': amount,
'filled': filled,
'remaining': remaining,
'trades': None,
'fee': {
'cost': feeCost,
'currency': feeCurrency,
'rate': feeRate,
},
'info': order,
}
async def create_order(self, symbol, type, side, amount, price=None, params={}):
if type == 'market':
raise ExchangeError(self.id + ' allows limit orders only')
await self.load_markets()
method = 'privatePost' + self.capitalize(side)
market = self.market(symbol)
order = {
'currencyPair': market['id'],
'rate': price,
'amount': amount,
}
response = await getattr(self, method)(self.extend(order, params))
return self.parse_order(self.extend({
'status': 'open',
'type': side,
'initialAmount': amount,
}, response), market)
async def cancel_order(self, id, symbol=None, params={}):
|
if symbol is None:
raise ArgumentsRequired(self.id + ' cancelOrder requires symbol argument')
await self.load_markets()
return await self.privatePostCancelOrder({
'orderNumber': id,
'currencyPair': self.market_id(symbol),
})
async def query_deposit_address(self, method, code, params={}):
await self.load_markets()
currency = self.currency(code)
method = 'privatePost' + method + 'Address'
response = await getattr(self, method)(self.extend({
'currency': currency['id'],
}, params))
address = self.safe_string(response, 'addr')
tag = None
if (address is not None) and(address.find('address') >= 0):
raise InvalidAddress(self.id + ' queryDepositAddress ' + address)
if code == 'XRP':
parts = address.split(' ')
address = parts[0]
tag = parts[1]
return {
'currency': currency,
'address': address,
'tag': tag,
'info': response,
}
async def create_deposit_address(self, code, params={}):
return await self.query_deposit_address('New', code, params)
async def fetch_deposit_address(self, code, params={}):
return await self.query_deposit_address('Deposit', code, params)
async def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
await self.load_markets()
market = None
if symbol is not None:
market = self.market(symbol)
response = await self.privatePostOpenOrders()
return self.parse_orders(response['orders'], market, since, limit)
async def fetch_order_trades(self, id, symbol=None, since=None, limit=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' fetchMyTrades requires a symbol argument')
await self.load_markets()
market = self.market(symbol)
response = await self.privatePostTradeHistory(self.extend({
'currencyPair': market['id'],
'orderNumber': id,
}, params))
return self.parse_trades(response['trades'], market, since, limit)
async def fetch_my_trades(self, symbol=None, since=None, limit=None, params={}):
if symbol is None:
raise ExchangeError(self.id + ' fetchMyTrades requires symbol param')
await self.load_markets()
market = self.market(symbol)
id = market['id']
response = await self.privatePostTradeHistory(self.extend({'currencyPair': id}, params))
return self.parse_trades(response['trades'], market, since, limit)
async def withdraw(self, code, amount, address, tag=None, params={}):
self.check_address(address)
await self.load_markets()
currency = self.currency(code)
response = await self.privatePostWithdraw(self.extend({
'currency': currency['id'],
'amount': amount,
'address': address, # Address must exist in you AddressBook in security settings
}, params))
return {
'info': response,
'id': None,
}
def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
prefix = (api + '/') if (api == 'private') else ''
url = self.urls['api'][api] + self.version + '/1/' + prefix + self.implode_params(path, params)
query = self.omit(params, self.extract_params(path))
if api == 'public':
if query:
url += '?' + self.urlencode(query)
else:
self.check_required_credentials()
nonce = self.nonce()
request = {'nonce': nonce}
body = self.urlencode(self.extend(request, query))
signature = self.hmac(self.encode(body), self.encode(self.secret), hashlib.sha512)
headers = {
'Key': self.apiKey,
'Sign': signature,
'Content-Type': 'application/x-www-form-urlencoded',
}
return {'url': url, 'method': method, 'body': body, 'headers': headers}
async def request(self, path, api='public', method='GET', params={}, headers=None, body=None):
response = await self.fetch2(path, api, method, params, headers, body)
if 'result' in response:
result = response['result']
message = self.id + ' ' + self.json(response)
if result is None:
raise ExchangeError(message)
if isinstance(result, basestring):
if result != 'true':
raise ExchangeError(message)
elif not result:
raise ExchangeError(message)
return response
| |
pipelines.py
|
# -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://doc.scrapy.org/en/latest/topics/item-pipeline.html
import pymongo
import datetime
import logging
from scrapy.conf import settings
import sys
reload(sys)
sys.setdefaultencoding('utf8')
class JobcrawlerPipeline(object):
def process_item(self, item, spider):
return item
educations = ("不限","大专","本科","硕士","博士")
#修正学历 有些职位中的学历明显不一致。需要修正
def clean_education(edu,body):
if edu not in educations:
for i in educations:
if i in body:
edu = i
else:
edu = '不限'
return edu
def clear_salary(salary):
res = salary.split("-")
temp = []
for x in res:
temp.append(int(x.upper().replace("K"," "))*1000)
result = {
"min":temp[0],
"max":temp[1],
"avg":int((temp[0]+temp[1])/2)
}
return result
def clear_time(time):
now_year = datetime.datetime.now().year
if '发布于' in time:
time = time.replace("发布于", str(now_year)+"-")
time = time.replace("月", "-")
time = time.replace("日", "")
if time.find("昨天") > 0:
time = str(datetime.date.today() - datetime.timedelta(days=1))
elif time.find(":") > 0:
time = str(datetime.date.today())
return time
def clear_position(name):
data = name.split(" ")
name = data[0]
work_year = data[-2]
educational = d
|
in name.upper():
return False
return True
#处理直聘网数据
class ZhipinPipeline(object):
def process_item(self, item, spider):
print '%s, %s' % ("pipeline item is:", item)
client = pymongo.MongoClient(host="127.0.0.1", port=27017)
db = client['job']
collection = db['position']
item['salary'] = clear_salary(item['salary'])
#UnicodeDecodeError: 'ascii' codec can't decode byte 0xe5 in position 0: ordinal not in range(128)
item['create_time'] = clear_time(item['create_time'])
#item['educational'] = clean_education(item['educational'],item['body'])
is_php = clean_name(item['position_name'])
if is_php is True:
collection.insert(dict(item))
client.close()
return item
|
ata[-1]
return name,work_year,educational
#判断PHP是否在职位名称中,不在就过滤掉。
#jd中含有php不参考,因为很多jd中都乱写
def clean_name(name):
if "PHP" not
|
ed25519blake2b.go
|
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package ed25519 implements the Ed25519 signature algorithm. See
// https://ed25519.cr.yp.to/.
//
// These functions are also compatible with the “Ed25519” function defined in
// RFC 8032. However, unlike RFC 8032's formulation, this package's private key
// representation includes a public key suffix to make multiple signing
// operations with the same key more efficient. This package refers to the RFC
// 8032 private key as the “seed”.
package ed25519blake2b
// This code is a port of the public domain, “ref10” implementation of ed25519
// from SUPERCOP.
import (
"bytes"
"crypto"
cryptorand "crypto/rand"
"errors"
"io"
"strconv"
"golang.org/x/crypto/blake2b"
"github.com/coingate/go-crypto/ed25519blake2b/internal/edwards25519"
)
const (
// PublicKeySize is the size, in bytes, of public keys as used in this package.
PublicKeySize = 32
// PrivateKeySize is the size, in bytes, of private keys as used in this package.
PrivateKeySize = 64
// SignatureSize is the size, in bytes, of signatures generated and verified by this package.
SignatureSize = 64
// SeedSize is the size, in bytes, of private key seeds. These are the private key representations used by RFC 8032.
SeedSize = 32
)
// PublicKey is the type of Ed25519 public keys.
type PublicKey []byte
// PrivateKey is the type of Ed25519 private keys. It implements crypto.Signer.
type PrivateKey []byte
// Public returns the PublicKey corresponding to priv.
func (priv PrivateKey) Public() crypto.PublicKey {
publicKey := make([]byte, PublicKeySize)
copy(publicKey, priv[32:])
return PublicKey(publicKey)
}
// Seed returns the private key seed corresponding to priv. It is provided for
// interoperability with RFC 8032. RFC 8032's private keys correspond to seeds
// in this package.
func (priv PrivateKey) Seed() []byte {
seed := make([]byte, SeedSize)
copy(seed, priv[:32])
return seed
}
// Sign signs the given message with priv.
// Ed25519 performs two passes over messages to be signed and therefore cannot
// handle pre-hashed messages. Thus opts.HashFunc() must return zero to
// indicate the message hasn't been hashed. This can be achieved by passing
// crypto.Hash(0) as the value for opts.
func (priv PrivateKey) Sign(rand io.Reader, message []byte, opts crypto.SignerOpts) (signature []byte, err error) {
if opts.HashFunc() != crypto.Hash(0) {
return nil, errors.New("ed25519: cannot sign hashed message")
}
return Sign(priv, message), nil
}
// GenerateKey generates a public/private key pair using entropy from rand.
// If rand is nil, crypto/rand.Reader will be used.
func GenerateKey(rand io.Reader) (PublicKey, PrivateKey, error) {
if rand == nil {
rand = cryptorand.Reader
}
seed := make([]byte, SeedSize)
if _, err := io.ReadFull(rand, seed); err != nil {
return nil, nil, err
}
privateKey := NewKeyFromSeed(seed)
publicKey := make([]byte, PublicKeySize)
copy(publicKey, privateKey[32:])
return publicKey, privateKey, nil
}
// NewKeyFromSeed calculates a private key from a seed. It will panic if
// len(seed) is not SeedSize. This function is provided for interoperability
// with RFC 8032. RFC 8032's private keys correspond to seeds in this
// package.
func NewKeyFromSeed(seed []byte) PrivateKey {
if l := l
|
gns the message with privateKey and returns a signature. It will
// panic if len(privateKey) is not PrivateKeySize.
func Sign(privateKey PrivateKey, message []byte) []byte {
if l := len(privateKey); l != PrivateKeySize {
panic("ed25519: bad private key length: " + strconv.Itoa(l))
}
h, _ := blake2b.New512(nil)
h.Write(privateKey[:32])
var digest1, messageDigest, hramDigest [64]byte
var expandedSecretKey [32]byte
h.Sum(digest1[:0])
copy(expandedSecretKey[:], digest1[:])
expandedSecretKey[0] &= 248
expandedSecretKey[31] &= 63
expandedSecretKey[31] |= 64
h.Reset()
h.Write(digest1[32:])
h.Write(message)
h.Sum(messageDigest[:0])
var messageDigestReduced [32]byte
edwards25519.ScReduce(&messageDigestReduced, &messageDigest)
var R edwards25519.ExtendedGroupElement
edwards25519.GeScalarMultBase(&R, &messageDigestReduced)
var encodedR [32]byte
R.ToBytes(&encodedR)
h.Reset()
h.Write(encodedR[:])
h.Write(privateKey[32:])
h.Write(message)
h.Sum(hramDigest[:0])
var hramDigestReduced [32]byte
edwards25519.ScReduce(&hramDigestReduced, &hramDigest)
var s [32]byte
edwards25519.ScMulAdd(&s, &hramDigestReduced, &expandedSecretKey, &messageDigestReduced)
signature := make([]byte, SignatureSize)
copy(signature[:], encodedR[:])
copy(signature[32:], s[:])
return signature
}
// Verify reports whether sig is a valid signature of message by publicKey. It
// will panic if len(publicKey) is not PublicKeySize.
func Verify(publicKey PublicKey, message, sig []byte) bool {
if l := len(publicKey); l != PublicKeySize {
panic("ed25519: bad public key length: " + strconv.Itoa(l))
}
if len(sig) != SignatureSize || sig[63]&224 != 0 {
return false
}
var A edwards25519.ExtendedGroupElement
var publicKeyBytes [32]byte
copy(publicKeyBytes[:], publicKey)
if !A.FromBytes(&publicKeyBytes) {
return false
}
edwards25519.FeNeg(&A.X, &A.X)
edwards25519.FeNeg(&A.T, &A.T)
h, _ := blake2b.New512(nil)
h.Write(sig[:32])
h.Write(publicKey[:])
h.Write(message)
var digest [64]byte
h.Sum(digest[:0])
var hReduced [32]byte
edwards25519.ScReduce(&hReduced, &digest)
var R edwards25519.ProjectiveGroupElement
var s [32]byte
copy(s[:], sig[32:])
// https://tools.ietf.org/html/rfc8032#section-5.1.7 requires that s be in
// the range [0, order) in order to prevent signature malleability.
if !edwards25519.ScMinimal(&s) {
return false
}
edwards25519.GeDoubleScalarMultVartime(&R, &hReduced, &A, &s)
var checkR [32]byte
R.ToBytes(&checkR)
return bytes.Equal(sig[:32], checkR[:])
}
|
en(seed); l != SeedSize {
panic("ed25519: bad seed length: " + strconv.Itoa(l))
}
digest := blake2b.Sum512(seed)
digest[0] &= 248
digest[31] &= 127
digest[31] |= 64
var A edwards25519.ExtendedGroupElement
var hBytes [32]byte
copy(hBytes[:], digest[:])
edwards25519.GeScalarMultBase(&A, &hBytes)
var publicKeyBytes [32]byte
A.ToBytes(&publicKeyBytes)
privateKey := make([]byte, PrivateKeySize)
copy(privateKey, seed)
copy(privateKey[32:], publicKeyBytes[:])
return privateKey
}
// Sign si
|
commands.ts
|
import { client } from '../../..';
import { Time } from '../../constants';
import { BotCommand } from '../../structures/BotCommand';
import { FastifyServer } from '../types';
export const commandsRoute = (server: FastifyServer) =>
server.route({
method: 'GET',
url: '/commands',
async handler(_, reply) {
const commands = client.commands.array() as BotCommand[];
reply.header(
'Cache-Control',
`public, max-age=${(Time.Minute * 5) / 1000}, s-maxage=${(Time.Minute * 5) / 1000}`
);
reply.send(
commands
.filter(
c =>
typeof c.description === 'string' &&
c.description.length > 1 &&
c.permissionLevel < 9
)
.map((cmd: BotCommand) => ({
name: cmd.name,
desc: cmd.description,
examples: cmd.examples,
permissionLevel: cmd.permissionLevel,
aliases: cmd.aliases,
perkTier: cmd.perkTier,
flags: cmd.categoryFlags
}))
.sort((a, b) => a.name.localeCompare(b.name))
|
);
}
});
| |
mythicalMysfitsService.py
|
from flask import Flask, jsonify, json, Response, request
from flask_cors import CORS
import mysfitsTableClient
# A very basic API created using Flask that has two possible routes for requests.
app = Flask(__name__)
app.config['JSONIFY_PRETTYPRINT_REGULAR'] = False
CORS(app)
# The service basepath has a short response just to ensure that healthchecks
# sent to the service root will receive a healthy response.
@app.route("/")
def healthCheckResponse():
return jsonify({"message" : "Nothing here, used for health check. Try /mysfits instead."})
# Returns the data for all of the Mysfits to be displayed on
# the website. If no filter query string is provided, all mysfits are retrived
# and returned. If a querystring filter is provided, only those mysfits are queried.
@app.route("/mysfits")
def getMysfits():
|
# Run the service on the local server it has been deployed to,
# listening on port 8080.
if __name__ == "__main__":
app.run(host="0.0.0.0", port=8080)
|
filterCategory = request.args.get('filter')
if filterCategory:
filterValue = request.args.get('value')
queryParam = {
'filter': filterCategory,
'value': filterValue
}
# a filter query string was found, query only for those mysfits.
serviceResponse = mysfitsTableClient.queryMysfits(queryParam)
else:
# no filter was found, retrieve all mysfits.
serviceResponse = mysfitsTableClient.getAllMysfits()
flaskResponse = Response(serviceResponse)
flaskResponse.headers["Content-Type"] = "application/json"
return flaskResponse
|
cons.py
|
# -*- coding: utf-8 -*-
"""
basic constants and utility functions
"""
import datetime as dt
import os
import time
import logging
import inspect
from decimal import Decimal
import requests
from functools import wraps
from simplejson.errors import JSONDecodeError
import pandas as pd
from pyecharts.options import (
AxisOpts,
DataZoomOpts,
LegendOpts,
TooltipOpts,
VisualMapOpts,
)
from numpy import sqrt
from scipy import optimize
from xalpha import __path__
from .exceptions import HttpStatusError
logger = logging.getLogger(__name__)
# date obj of today
# today = lambda: dt.datetime.combine(dt.date.today(), dt.time.min)
tz_bj = dt.timezone(dt.timedelta(hours=8))
def today_obj():
"""
today obj in beijing timezone with no tzinfo
:return: datetime.datetime
"""
now = dt.datetime.now(tz=tz_bj)
return now.replace(hour=0, minute=0, second=0, microsecond=0).replace(tzinfo=None)
# datetime obj for yesterdate date with time set to be 0:0:0
yesterdayobj = lambda: (dt.datetime.now(tz_bj).replace(tzinfo=None) - dt.timedelta(1))
# string for yesterday, only used for indexinfo url
yesterday = lambda: dt.datetime.strftime(yesterdayobj(), "%Y%m%d")
# string for yesterday with dash
yesterdaydash = lambda: dt.datetime.strftime(yesterdayobj(), "%Y-%m-%d")
# list: all the trade date of domestic stock market in the form of string
caldate = pd.read_csv(os.path.join(__path__[0], "caldate.csv"))
opendate = list(caldate[caldate["is_open"] == 1]["cal_date"])
# opendate = list(ts.trade_cal()[ts.trade_cal()['isOpen']==1]['calendarDate'])
opendate_set = set(opendate) # for speed checking?
# fund code list which always round down for the purchase share approximation
droplist = ["003318", "000311", "000601", "009989"]
sqrt_days_in_year = sqrt(250.0)
def calendar_selfcheck():
# 国内链接 githubusercontent.com 大概率存在问题,因此设计成联网自动更新日历大概率无用。
# 也许之后考虑一些较稳定的第三方资源托管服务
current_year = dt.datetime.now().year
if str(current_year) != opendate[-1][:4]:
logger.warning(
"Please update xalpha via `pip install -U xalpha` to keep the trade calendar up-to-date"
)
print("请更新 xalpha 版本以更新最新年份的 A 股交易日历, 否则将可能无法正确获取和处理最新的基金净值")
calendar_selfcheck()
region_trans = {
"瑞士": "CH",
"日本": "JP",
"韩国": "KR",
"美国": "US",
"香港": "HK",
"中国香港": "HK",
"德国": "DE",
"英国": "UK",
"法国": "FR",
"中国": "CN",
"墨西哥": "MX",
"澳大利亚": "AU",
"新加坡": "SG",
"印度": "IN",
"台湾": "TW",
"中国台湾": "TW",
}
# extract from xa.misc.get_tdx_holidays
holidays = {
"AU": [
"2020-01-01",
"2020-01-27",
"2020-04-10",
"2020-04-13",
"2020-04-25",
"2020-06-08",
"2020-12-24",
"2020-12-25",
"2020-12-28",
"2020-12-31",
"2021-01-01",
"2021-01-26",
"2021-04-02",
"2021-04-05",
"2021-06-14",
"2021-12-24",
"2021-12-27",
"2021-12-28",
"2021-12-31",
],
"CH": [
"2020-01-01",
"2020-01-02",
"2020-04-10",
"2020-04-13",
"2020-05-01",
"2020-05-21",
"2020-06-01",
"2020-12-24",
"2020-12-25",
"2020-12-31",
"2021-01-01",
"2021-04-02",
"2021-04-05",
"2021-05-13",
"2021-05-24",
"2021-12-24",
"2021-12-31",
],
"CN": [
"2020-01-01",
"2020-01-24",
"2020-01-27",
"2020-01-28",
"2020-01-29",
"2020-01-30",
"2020-01-31",
"2020-04-06",
"2020-05-01",
"2020-05-04",
"2020-05-05",
"2020-06-25",
"2020-06-26",
"2020-10-01",
"2020-10-02",
"2020-10-05",
"2020-10-06",
"2020-10-07",
"2020-10-08",
"2021-01-01",
"2021-02-11",
"2021-02-12",
"2021-02-15",
"2021-02-16",
"2021-02-17",
"2021-04-05",
"2021-05-03",
"2021-05-04",
"2021-05-05",
"2021-06-14",
"2021-09-20",
"2021-09-21",
"2021-10-01",
"2021-10-04",
"2021-10-05",
"2021-10-06",
"2021-10-07",
],
"DE": [
"2020-01-01",
"2020-04-10",
"2020-04-13",
"2020-05-01",
"2020-06-01",
"2020-12-24",
"2020-12-25",
"2020-12-31",
"2021-01-01",
"2021-04-02",
"2021-04-05",
"2021-05-24",
"2021-12-24",
"2021-12-31",
],
"FR": [
"2020-01-01",
"2020-04-10",
"2020-04-13",
"2020-05-01",
"2020-12-24",
"2020-12-25",
"2020-12-31",
"2021-01-01",
"2021-04-02",
"2021-04-05",
"2021-12-24",
"2021-12-31",
],
"HK": [
"2020-01-01",
"2020-01-27",
"2020-01-28",
"2020-04-10",
"2020-04-13",
"2020-04-30",
"2020-05-01",
"2020-06-25",
"2020-07-01",
"2020-10-01",
"2020-10-02",
"2020-10-26",
"2020-12-25",
"2021-01-01",
"2021-02-11",
"2021-02-12",
"2021-02-15",
"2021-04-02",
"2021-04-05",
"2021-04-06",
"2021-05-19",
"2021-06-14",
"2021-07-01",
"2021-09-22",
"2021-10-01",
"2021-10-14",
"2021-12-24",
"2021-12-27",
"2021-12-31",
],
"IN": [
"2020-02-21",
"2020-03-10",
"2020-04-02",
"2020-04-06",
"2020-04-10",
"2020-04-14",
"2020-05-01",
"2020-05-25",
"2020-10-02",
"2020-11-16",
"2020-11-30",
"2020-12-25",
"2021-01-26",
"2021-03-11",
"2021-03-29",
"2021-04-02",
"2021-04-14",
"2021-04-21",
"2021-05-13",
"2021-07-20",
"2021-08-19",
"2021-09-10",
"2021-10-15",
"2021-11-04",
"2021-11-19",
],
"JP": [
"2020-01-01",
"2020-01-02",
"2020-01-03",
"2020-01-13",
"2020-02-11",
"2020-02-24",
"2020-03-20",
"2020-04-29",
"2020-05-04",
"2020-05-05",
"2020-05-06",
"2020-07-23",
"2020-07-24",
"2020-08-10",
"2020-09-21",
"2020-09-22",
"2020-11-03",
"2020-11-23",
"2020-12-31",
"2021-01-01",
"2021-01-11",
"2021-02-11",
"2021-02-23",
"2021-04-29",
"2021-05-03",
"2021-05-04",
"2021-05-05",
"2021-07-22",
"2021-07-23",
"2021-08-09",
"2021-09-20",
"2021-09-23",
"2021-11-03",
"2021-11-23",
"2021-12-31",
],
"KR": [
"2020-01-01",
"2020-01-24",
"2020-01-27",
"2020-04-30",
"2020-05-01",
"2020-05-05",
"2020-09-30",
"2020-10-01",
"2020-10-02",
"2020-10-09",
"2020-12-25",
"2020-12-31",
"2021-01-01",
"2021-02-11",
"2021-02-12",
"2021-03-01",
"2021-05-05",
"2021-05-19",
"2021-09-20",
"2021-09-21",
"2021-09-22",
"2021-12-31",
],
"SG": [
"2020-01-01",
"2020-01-24",
"2020-04-10",
"2020-05-01",
"2020-05-07",
"2020-05-21",
"2020-07-31",
"2020-08-10",
"2020-12-24",
"2020-12-25",
"2020-12-31",
"2021-01-01",
"2021-02-11",
"2021-02-12",
"2021-04-02",
"2021-05-13",
"2021-05-26",
"2021-07-20",
"2021-08-09",
"2021-11-04",
"2021-12-24",
"2021-12-31",
],
"TW": [
"2020-01-01",
"2020-01-21",
"2020-01-22",
"2020-01-23",
"2020-01-24",
"2020-01-27",
"2020-01-28",
"2020-01-29",
"2020-02-28",
"2020-04-02",
"2020-04-03",
"2020-05-01",
"2020-06-25",
"2020-06-26",
"2020-10-01",
"2020-10-02",
"2020-10-09",
"2021-01-01",
"2021-02-08",
"2021-02-09",
"2021-02-10",
"2021-02-11",
"2021-02-12",
"2021-02-15",
"2021-02-16",
"2021-03-01",
"2021-04-02",
"2021-04-05",
"2021-04-30",
"2021-06-14",
"2021-09-20",
"2021-09-21",
"2021-10-11",
"2021-12-31",
],
"UK": [
"2020-01-01",
"2020-04-10",
"2020-04-13",
"2020-05-08",
"2020-05-25",
"2020-08-31",
"2020-12-24",
"2020-12-25",
"2020-12-28",
"2020-12-31",
"2021-01-01",
"2021-01-01",
"2021-04-02",
"2021-04-05",
"2021-05-03",
"2021-05-31",
"2021-08-30",
"2021-12-24",
"2021-12-27",
"2021-12-28",
"2021-12-31",
"2022-01-03",
],
"US": [
"2020-01-01",
"2020-01-20",
"2020-02-17",
"2020-03-08",
"2020-04-10",
"2020-05-25",
"2020-07-03",
"2020-09-07",
"2020-11-01",
"2020-11-26",
"2020-11-27",
"2020-12-24",
"2020-12-25",
"2021-01-01",
"2021-01-01",
"2021-01-18",
"2021-02-15",
"2021-03-14",
"2021-04-02",
"2021-05-31",
"2021-07-05",
"2021-09-06",
"2021-11-07",
"2021-11-25",
"2021-11-26",
"2021-12-24",
],
}
connection_errors = (
HttpStatusError,
ConnectionResetError,
requests.exceptions.RequestException,
requests.exceptions.ConnectionError,
requests.exceptions.SSLError,
JSONDecodeError,
)
line_opts = {
"datazoom_opts": [
DataZoomOpts(is_show=True, type_="slider", range_start=50, range_end=100),
DataZoomOpts(
is_show=True,
type_="slider",
orient="vertical",
range_start=50,
range_end=100,
),
],
"tooltip_opts": TooltipOpts(
is_show=True, trigger="axis", trigger_on="mousemove", axis_pointer_type="cross"
),
}
heatmap_opts = {
"visualmap_opts": VisualMapOpts(
min_=-1, max_=1, orient="horizontal", pos_right="middle", pos_top="bottom"
)
}
# pie_opts = {
# "tooltip_opts": TooltipOpts(),
# "legend_opts": LegendOpts(orient="vertical", pos_left="left"),
# }
themeriver_opts = {
"xaxis_opts": AxisOpts(type_="time"),
"datazoom_opts": [DataZoomOpts(range_start=60, range_end=100)],
"tooltip_opts": TooltipOpts(trigger_on="mousemove", trigger="item"),
"legend_opts": LegendOpts(pos_top="top"),
}
def xnpv(rate, cashflows):
"""
give the current cash value based on future cashflows
:param rate: float, the preset year rate
:param cashflows: a list, in which each element is a tuple of the form (date, amount),
where date is a datetime object and amount is an integer or floating number.
Cash outflows (investments) are represented with negative amounts,
and cash inflows (returns) are positive amounts.
:returns: a single float value which is the NPV of the given cash flows
"""
chron_order = sorted(cashflows, key=lambda x: x[0])
t0 = chron_order[0][0]
return sum([cf / (1 + rate) ** ((t - t0).days / 365.0) for (t, cf) in chron_order])
def xirr(cashflows, guess=0.1):
"""
calculate the Internal Rate of Return of a series of cashflows at irregular intervals.
:param cashflows: a list, in which each element is a tuple of the form (date, amount),
where date is a datetime object and amount is an integer or floating number.
Cash outflows (investments) are represented with negative amounts,
and cash inflows (returns) are positive amounts.
:param guess: floating number, a guess at the xirr rate solution to be used
as a starting point for the numerical solution
:returns: the IRR as a single floating number
"""
return optimize.newton(lambda r: xnpv(r, cashflows), guess)
def myround(num, label=1):
"""
correct implementation of round with round half up, round to 2 decimals
:param num: the floating number, to be rounded
:param label: integer 1 or 2, 1 for round half up while 2 for always round down
:returns: the float number after rounding, with two decimals
"""
if label == 1:
res = float(
Decimal(str(num)).quantize(Decimal("0.01"), rounding="ROUND_HALF_UP")
)
elif (
label == 2
): # for jingshunchangcheng... who just omit the overflow share behind 2 decimal
res = float(Decimal(str(num)).quantize(Decimal("0.01"), rounding="ROUND_DOWN"))
return res
def convert_date(date):
"""
convert date into datetime object
:param date: string of form '2017-01-01' or datetime object
:returns: corresponding datetime object
"""
if isinstance(date, str):
return pd.Timestamp(date)
else:
return date
def _date_check(dtobj, check=False):
if not isinstance(dtobj, dt.datetime):
dtobj = dt.datetime.strptime(dtobj.replace("/", "").replace("-", ""), "%Y%m%d")
if check and (dtobj.year > dt.datetime.now().year or dtobj.year < 1991):
raise ValueError(
"date goes beyond market range: %s" % dtobj.strftime("%Y-%m-%d")
)
return dtobj
def next_onday(dtobj):
dtobj = _date_check(dtobj, check=True)
dtobj += dt.timedelta(1)
while dtobj.strftime("%Y-%m-%d") not in opendate_set:
dtobj += dt.timedelta(1)
return dtobj
def last_onday(dtobj):
dtobj = _date_check(dtobj, check=True)
dtobj -= dt.timedelta(1)
while dtobj.strftime("%Y-%m-%d") not in opendate_set:
dtobj -= dt.timedelta(1)
return dtobj
def avail_dates(dtlist, future=False):
"""
make every day in the list the next open day
:param dtlist: datetime obj list
:param future: bool, default False, indicating the latest day in the list is yesterday
:return: datetime obj list
"""
ndtlist = []
for d in dtlist:
if d.strftime("%Y-%m-%d") not in opendate_set:
nd = next_onday(d)
else:
nd = d
if future is False:
if (nd - yesterdayobj()).days > 0:
continue
ndtlist.append(nd)
return ndtlist
def scale_dict(d, scale=1, ulimit=100, dlimit=50, aim=None):
t = sum([v for _, v in d.items()])
if t * scale > ulimit:
scale = ulimit / t
elif t * scale < dlimit:
scale = dlimit / t
if aim:
scale = aim / t
for k, v in d.items():
d[k] = v * scale
return d
def _float(n):
try:
n = n.replace(",", "")
if n.endswith("K") or n.endswith("k"):
n = float(n[:-1]) * 1000
elif n.endswith("M") or n.endswith("m"):
n = float(n[:-1]) * 1000 * 1000
elif n.endswith("G") or n.endswith("g") or n.endswith("B") or n.endswith("b"):
n = float(n[:-1]) * 1000 * 1000 * 1000
elif n == "-":
logger.info("_float met -, taken as 0")
return 0
elif n.endswith("%"):
logger.info("_float met with %% as %s" % n)
return float(n[:-1]) / 100
except AttributeError:
pass
if not n:
logger.info("_
|
return float(n)
def reconnect(tries=5, timeout=12):
def robustify(f):
default_header = {
'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,'
'application/signed-exchange;v=b3;q=0.9',
'accept-encoding': 'gzip, deflate, br',
'accept-language': 'zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6,ja;q=0.5',
'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) '
'Chrome/89.0.4389.114 Safari/537.36 Edg/89.0.774.76',
}
@wraps(f)
def wrapper(*args, **kws):
import xalpha.provider as xp
if getattr(xp, "proxy", None):
kws["proxies"] = {"http": xp.proxy, "https": xp.proxy}
kws["timeout"] = timeout
logger.debug("Using proxy %s" % xp.proxy)
if args:
url = args[0]
else:
url = kws.get("url", "")
headers = kws.get("headers", {})
if len(headers) == 0:
headers.update(default_header)
kws["headers"] = headers
for count in range(tries):
try:
logger.debug("Fetching url: %s . Inside function `%s`" % (url, inspect.stack()[1].function))
r = f(*args, **kws)
if getattr(r, "status_code", 200) != 200: # in case r is a json dict
raise HttpStatusError
return r
except connection_errors as e:
logger.warning("Fails at fetching url: %s. Try again." % url)
if count == tries - 1:
logger.error("Still wrong at fetching url: %s. after %s tries." % (url, tries))
logger.error("Fails due to %s" % e.args[0])
raise e
time.sleep(0.5 * count)
return wrapper
return robustify
rget = reconnect()(requests.get)
rpost = reconnect()(requests.post)
@reconnect()
def rget_json(*args, **kws):
r = requests.get(*args, **kws)
return r.json()
@reconnect()
def rpost_json(*args, **kws):
r = requests.post(*args, **kws)
return r.json()
# def rget(*args, **kws):
# tries = 5
# for count in range(tries):
# try:
# r = requests.get(*args, **kws)
# return r
# except connection_errors as e:
# if count == tries - 1:
# print(*args, sep="\n")
# print("still wrong after several tries")
# raise e
# time.sleep(0.5*count)
#
#
# def rget_json(*args, **kws):
# tries = 5
# for count in range(tries):
# try:
# r = requests.get(*args, **kws)
# return r.json()
# except connection_errors as e:
# if count == tries - 1:
# print(*args, sep="\n")
# print("still wrong after several tries")
# raise e
# time.sleep(0.5*count)
#
#
# def rpost(*args, **kws):
# tries = 5
# for count in range(tries):
# try:
# r = requests.post(*args, **kws)
# return r
# except connection_errors as e:
# if count == tries - 1:
# print(*args, sep="\n")
# print("still wrong after several tries")
# raise e
# time.sleep(0.5*count)
#
#
# def rpost_json(*args, **kws):
# tries = 5
# for count in range(tries):
# try:
# r = requests.post(*args, **kws)
# return r.json()
# except connection_errors as e:
# if count == tries - 1:
# print(*args, sep="\n")
# print("still wrong after several tries")
# raise e
# time.sleep(0.5*count)
## simple subsitution for holdings.py
holdings = {}
holdings["501018"] = {
"etfs/etfs-brent-1mth-uk": 17.51,
"etfs/etfs-brent-crude": 15.04,
"etfs/etfs-crude-oil": 7.34,
"etfs/ipath-series-b-sp-gsci-crd-oil-tr": 0.06,
"etfs/powershares-db-oil-fund": 11.6,
"etfs/ubs-cmci-oil-sf-usd": 8.68,
"etfs/united-states-12-month-oil": 8.14,
"etfs/united-states-brent-oil-fund-lp": 15.42,
"etfs/united-states-oil-fund": 9.63,
}
holdings["501018rt"] = {
"commodities/brent-oil": {"weight": 49, "time": -1},
"commodities/crude-oil": {"weight": 45, "time": 4},
}
|
float met with None as input arguments")
return 0.0
|
app.module.ts
|
import { Module } from '@nestjs/common';
import { AppController } from './app.controller';
import { AppService } from './app.service';
import { ItemsModule } from './items/items.module';
import { MongooseModule } from '@nestjs/mongoose';
import { ConfigModule } from '@nestjs/config';
import { UsersModule } from './users/users.module';
import { AuthModule } from './auth/auth.module';
import config from './config/keys';
@Module({
imports: [
ConfigModule.forRoot({
isGlobal: true,
}),
ItemsModule,
MongooseModule.forRoot(config.mongoURI),
UsersModule,
AuthModule,
|
})
export class AppModule {}
|
],
controllers: [AppController],
providers: [AppService],
|
test_install_basic.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function
import os
import pytest
from flaky import flaky
from pipenv._compat import Path, TemporaryDirectory
from pipenv.utils import temp_environ
from pipenv.vendor import delegator
@pytest.mark.setup
@pytest.mark.basic
@pytest.mark.install
def test_basic_setup(PipenvInstance):
with PipenvInstance() as p:
with PipenvInstance(pipfile=False) as p:
c = p.pipenv("install requests")
assert c.return_code == 0
assert "requests" in p.pipfile["packages"]
assert "requests" in p.lockfile["default"]
assert "chardet" in p.lockfile["default"]
assert "idna" in p.lockfile["default"]
assert "urllib3" in p.lockfile["default"]
assert "certifi" in p.lockfile["default"]
@flaky
@pytest.mark.basic
@pytest.mark.install
@pytest.mark.skip_osx
def
|
(PipenvInstance):
with PipenvInstance() as p:
c = p.pipenv("install requests")
assert c.return_code == 0
assert "requests" in p.pipfile["packages"]
assert "requests" in p.lockfile["default"]
assert "chardet" in p.lockfile["default"]
assert "idna" in p.lockfile["default"]
assert "urllib3" in p.lockfile["default"]
assert "certifi" in p.lockfile["default"]
@flaky
@pytest.mark.basic
@pytest.mark.install
def test_mirror_install(PipenvInstance):
with temp_environ(), PipenvInstance(chdir=True) as p:
mirror_url = os.environ.pop(
"PIPENV_TEST_INDEX", "https://pypi.python.org/simple"
)
assert "pypi.org" not in mirror_url
# This should sufficiently demonstrate the mirror functionality
# since pypi.org is the default when PIPENV_TEST_INDEX is unset.
c = p.pipenv("install requests --pypi-mirror {0}".format(mirror_url))
assert c.return_code == 0
# Ensure the --pypi-mirror parameter hasn't altered the Pipfile or Pipfile.lock sources
assert len(p.pipfile["source"]) == 1
assert len(p.lockfile["_meta"]["sources"]) == 1
assert "https://pypi.org/simple" == p.pipfile["source"][0]["url"]
assert "https://pypi.org/simple" == p.lockfile["_meta"]["sources"][0]["url"]
assert "requests" in p.pipfile["packages"]
assert "requests" in p.lockfile["default"]
assert "chardet" in p.lockfile["default"]
assert "idna" in p.lockfile["default"]
assert "urllib3" in p.lockfile["default"]
assert "certifi" in p.lockfile["default"]
@flaky
@pytest.mark.basic
@pytest.mark.install
@pytest.mark.needs_internet
def test_bad_mirror_install(PipenvInstance):
with temp_environ(), PipenvInstance(chdir=True) as p:
# This demonstrates that the mirror parameter is being used
os.environ.pop("PIPENV_TEST_INDEX", None)
c = p.pipenv("install requests --pypi-mirror https://pypi.example.org")
assert c.return_code != 0
@pytest.mark.lock
@pytest.mark.complex
@pytest.mark.skip(reason="Does not work unless you can explicitly install into py2")
def test_complex_lock(PipenvInstance):
with PipenvInstance() as p:
c = p.pipenv("install apscheduler")
assert c.return_code == 0
assert "apscheduler" in p.pipfile["packages"]
assert "funcsigs" in p.lockfile[u"default"]
assert "futures" in p.lockfile[u"default"]
@flaky
@pytest.mark.dev
@pytest.mark.run
def test_basic_dev_install(PipenvInstance):
with PipenvInstance() as p:
c = p.pipenv("install requests --dev")
assert c.return_code == 0
assert "requests" in p.pipfile["dev-packages"]
assert "requests" in p.lockfile["develop"]
assert "chardet" in p.lockfile["develop"]
assert "idna" in p.lockfile["develop"]
assert "urllib3" in p.lockfile["develop"]
assert "certifi" in p.lockfile["develop"]
c = p.pipenv("run python -m requests.help")
assert c.return_code == 0
@flaky
@pytest.mark.dev
@pytest.mark.basic
@pytest.mark.install
def test_install_without_dev(PipenvInstance):
"""Ensure that running `pipenv install` doesn't install dev packages"""
with PipenvInstance(chdir=True) as p:
with open(p.pipfile_path, "w") as f:
contents = """
[packages]
six = "*"
[dev-packages]
tablib = "*"
""".strip()
f.write(contents)
c = p.pipenv("install")
assert c.return_code == 0
assert "six" in p.pipfile["packages"]
assert "tablib" in p.pipfile["dev-packages"]
assert "six" in p.lockfile["default"]
assert "tablib" in p.lockfile["develop"]
c = p.pipenv('run python -c "import tablib"')
assert c.return_code != 0
c = p.pipenv('run python -c "import six"')
assert c.return_code == 0
@flaky
@pytest.mark.basic
@pytest.mark.install
def test_install_without_dev_section(PipenvInstance):
with PipenvInstance() as p:
with open(p.pipfile_path, "w") as f:
contents = """
[packages]
six = "*"
""".strip()
f.write(contents)
c = p.pipenv("install")
assert c.return_code == 0
assert "six" in p.pipfile["packages"]
assert p.pipfile.get("dev-packages", {}) == {}
assert "six" in p.lockfile["default"]
assert p.lockfile["develop"] == {}
c = p.pipenv('run python -c "import six"')
assert c.return_code == 0
@flaky
@pytest.mark.lock
@pytest.mark.extras
@pytest.mark.install
def test_extras_install(PipenvInstance):
with PipenvInstance(chdir=True) as p:
c = p.pipenv("install requests[socks]")
assert c.return_code == 0
assert "requests" in p.pipfile["packages"]
assert "extras" in p.pipfile["packages"]["requests"]
assert "requests" in p.lockfile["default"]
assert "chardet" in p.lockfile["default"]
assert "idna" in p.lockfile["default"]
assert "urllib3" in p.lockfile["default"]
assert "pysocks" in p.lockfile["default"]
@flaky
@pytest.mark.pin
@pytest.mark.basic
@pytest.mark.install
def test_windows_pinned_pipfile(PipenvInstance):
with PipenvInstance() as p:
with open(p.pipfile_path, "w") as f:
contents = """
[packages]
requests = "==2.19.1"
""".strip()
f.write(contents)
c = p.pipenv("install")
assert c.return_code == 0
assert "requests" in p.pipfile["packages"]
assert "requests" in p.lockfile["default"]
@flaky
@pytest.mark.basic
@pytest.mark.install
@pytest.mark.resolver
@pytest.mark.backup_resolver
def test_backup_resolver(PipenvInstance):
with PipenvInstance() as p:
with open(p.pipfile_path, "w") as f:
contents = """
[packages]
"ibm-db-sa-py3" = "==0.3.1-1"
""".strip()
f.write(contents)
c = p.pipenv("install")
assert c.return_code == 0
assert "ibm-db-sa-py3" in p.lockfile["default"]
@flaky
@pytest.mark.run
@pytest.mark.alt
def test_alternative_version_specifier(PipenvInstance):
with PipenvInstance() as p:
with open(p.pipfile_path, "w") as f:
contents = """
[packages]
requests = {version = "*"}
""".strip()
f.write(contents)
c = p.pipenv("install")
assert c.return_code == 0
assert "requests" in p.lockfile["default"]
assert "idna" in p.lockfile["default"]
assert "urllib3" in p.lockfile["default"]
assert "certifi" in p.lockfile["default"]
assert "chardet" in p.lockfile["default"]
c = p.pipenv('run python -c "import requests; import idna; import certifi;"')
assert c.return_code == 0
@flaky
@pytest.mark.run
@pytest.mark.alt
def test_outline_table_specifier(PipenvInstance):
with PipenvInstance() as p:
with open(p.pipfile_path, "w") as f:
contents = """
[packages.requests]
version = "*"
""".strip()
f.write(contents)
c = p.pipenv("install")
assert c.return_code == 0
assert "requests" in p.lockfile["default"]
assert "idna" in p.lockfile["default"]
assert "urllib3" in p.lockfile["default"]
assert "certifi" in p.lockfile["default"]
assert "chardet" in p.lockfile["default"]
c = p.pipenv('run python -c "import requests; import idna; import certifi;"')
assert c.return_code == 0
@pytest.mark.bad
@pytest.mark.basic
@pytest.mark.install
def test_bad_packages(PipenvInstance):
with PipenvInstance() as p:
c = p.pipenv("install NotAPackage")
assert c.return_code > 0
@pytest.mark.lock
@pytest.mark.extras
@pytest.mark.install
@pytest.mark.requirements
def test_requirements_to_pipfile(PipenvInstance, pypi):
with PipenvInstance(pipfile=False, chdir=True) as p:
# Write a requirements file
with open("requirements.txt", "w") as f:
f.write("-i {}\nrequests[socks]==2.19.1\n".format(pypi.url))
c = p.pipenv("install")
assert c.return_code == 0
print(c.out)
print(c.err)
print(delegator.run("ls -l").out)
# assert stuff in pipfile
assert "requests" in p.pipfile["packages"]
assert "extras" in p.pipfile["packages"]["requests"]
# assert stuff in lockfile
assert "requests" in p.lockfile["default"]
assert "chardet" in p.lockfile["default"]
assert "idna" in p.lockfile["default"]
assert "urllib3" in p.lockfile["default"]
assert "pysocks" in p.lockfile["default"]
@pytest.mark.basic
@pytest.mark.install
@pytest.mark.skip_osx
@pytest.mark.requirements
def test_skip_requirements_when_pipfile(PipenvInstance):
"""Ensure requirements.txt is NOT imported when
1. We do `pipenv install [package]`
2. A Pipfile already exists when we run `pipenv install`.
"""
with PipenvInstance(chdir=True) as p:
with open("requirements.txt", "w") as f:
f.write("requests==2.18.1\n")
c = p.pipenv("install six")
assert c.return_code == 0
with open(p.pipfile_path, "w") as f:
contents = """
[packages]
six = "*"
fake_package = "<0.12"
""".strip()
f.write(contents)
c = p.pipenv("install")
assert c.ok
assert "fake_package" in p.pipfile["packages"]
assert "fake-package" in p.lockfile["default"]
assert "six" in p.pipfile["packages"]
assert "six" in p.lockfile["default"]
assert "requests" not in p.pipfile["packages"]
assert "requests" not in p.lockfile["default"]
@pytest.mark.cli
@pytest.mark.clean
def test_clean_on_empty_venv(PipenvInstance):
with PipenvInstance() as p:
c = p.pipenv("clean")
assert c.return_code == 0
@pytest.mark.basic
@pytest.mark.install
def test_install_does_not_extrapolate_environ(PipenvInstance):
"""Ensure environment variables are not expanded in lock file.
"""
with temp_environ(), PipenvInstance(chdir=True) as p:
# os.environ["PYPI_URL"] = pypi.url
os.environ["PYPI_URL"] = p.pypi
with open(p.pipfile_path, "w") as f:
f.write(
"""
[[source]]
url = '${PYPI_URL}/simple'
verify_ssl = true
name = 'mockpi'
"""
)
# Ensure simple install does not extrapolate.
c = p.pipenv("install")
assert c.return_code == 0
assert p.pipfile["source"][0]["url"] == "${PYPI_URL}/simple"
assert p.lockfile["_meta"]["sources"][0]["url"] == "${PYPI_URL}/simple"
# Ensure package install does not extrapolate.
c = p.pipenv("install six")
assert c.return_code == 0
assert p.pipfile["source"][0]["url"] == "${PYPI_URL}/simple"
assert p.lockfile["_meta"]["sources"][0]["url"] == "${PYPI_URL}/simple"
@pytest.mark.basic
@pytest.mark.editable
@pytest.mark.badparameter
@pytest.mark.install
def test_editable_no_args(PipenvInstance):
with PipenvInstance() as p:
c = p.pipenv("install -e")
assert c.return_code != 0
assert "Error: -e option requires an argument" in c.err
@pytest.mark.basic
@pytest.mark.install
@pytest.mark.virtualenv
def test_install_venv_project_directory(PipenvInstance):
"""Test the project functionality during virtualenv creation.
"""
with PipenvInstance(chdir=True) as p:
with temp_environ(), TemporaryDirectory(
prefix="pipenv-", suffix="temp_workon_home"
) as workon_home:
os.environ["WORKON_HOME"] = workon_home.name
if "PIPENV_VENV_IN_PROJECT" in os.environ:
del os.environ["PIPENV_VENV_IN_PROJECT"]
c = p.pipenv("install six")
assert c.return_code == 0
venv_loc = None
for line in c.err.splitlines():
if line.startswith("Virtualenv location:"):
venv_loc = Path(line.split(":", 1)[-1].strip())
assert venv_loc is not None
assert venv_loc.joinpath(".project").exists()
@pytest.mark.cli
@pytest.mark.deploy
@pytest.mark.system
def test_system_and_deploy_work(PipenvInstance):
with PipenvInstance(chdir=True) as p:
c = p.pipenv("install tablib")
assert c.return_code == 0
c = p.pipenv("--rm")
assert c.return_code == 0
c = delegator.run("virtualenv .venv")
assert c.return_code == 0
c = p.pipenv("install --system --deploy")
assert c.return_code == 0
c = p.pipenv("--rm")
assert c.return_code == 0
Path(p.pipfile_path).write_text(
u"""
[packages]
tablib = "*"
""".strip()
)
c = p.pipenv("install --system")
assert c.return_code == 0
@pytest.mark.basic
@pytest.mark.install
def test_install_creates_pipfile(PipenvInstance):
with PipenvInstance(chdir=True) as p:
if os.path.isfile(p.pipfile_path):
os.unlink(p.pipfile_path)
if "PIPENV_PIPFILE" in os.environ:
del os.environ["PIPENV_PIPFILE"]
assert not os.path.isfile(p.pipfile_path)
c = p.pipenv("install")
assert c.return_code == 0
assert os.path.isfile(p.pipfile_path)
@pytest.mark.basic
@pytest.mark.install
def test_install_non_exist_dep(PipenvInstance):
with PipenvInstance(chdir=True) as p:
c = p.pipenv("install dateutil")
assert not c.ok
assert "dateutil" not in p.pipfile["packages"]
@pytest.mark.basic
@pytest.mark.install
def test_install_package_with_dots(PipenvInstance):
with PipenvInstance(chdir=True) as p:
c = p.pipenv("install backports.html")
assert c.ok
assert "backports.html" in p.pipfile["packages"]
@pytest.mark.basic
@pytest.mark.install
def test_rewrite_outline_table(PipenvInstance):
with PipenvInstance(chdir=True) as p:
with open(p.pipfile_path, 'w') as f:
contents = """
[packages]
six = {version = "*"}
[packages.requests]
version = "*"
extras = ["socks"]
""".strip()
f.write(contents)
c = p.pipenv("install flask")
assert c.return_code == 0
with open(p.pipfile_path) as f:
contents = f.read()
assert "[packages.requests]" not in contents
assert 'six = {version = "*"}' in contents
assert 'requests = {version = "*"' in contents
assert 'flask = "*"' in contents
|
test_basic_install
|
linker.rs
|
use super::module::ModuleIndex;
use std::collections::HashMap;
use std::fmt;
#[derive(PartialEq, Eq, Hash)]
pub struct GlobalAddress<T>(usize, std::marker::PhantomData<T>);
impl<T> Clone for GlobalAddress<T> {
fn clone(&self) -> Self {
Self(self.0, self.1)
}
}
impl<T> Copy for GlobalAddress<T> {}
impl<T> fmt::Debug for GlobalAddress<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
writeln!(f, "GlobalAddress({})", self.0)
}
}
#[derive(PartialEq, Eq, Hash)]
pub struct LinkableAddress<T>(ModuleIndex, pub(crate) usize, std::marker::PhantomData<T>);
impl<T> LinkableAddress<T> {
pub fn new_unsafe(module: ModuleIndex, index: usize) -> Self {
Self(module, index, std::marker::PhantomData)
}
pub fn module_index(&self) -> ModuleIndex {
self.0
}
}
impl<T> Clone for LinkableAddress<T> {
fn clone(&self) -> Self {
Self::new_unsafe(self.0, self.1)
}
}
impl<T> Copy for LinkableAddress<T> {}
impl<T> fmt::Debug for LinkableAddress<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
writeln!(f, "{:?}, func_index: {}", self.0, self.1)
}
}
pub struct LinkableCollection<T> {
items: Vec<T>,
item_addrs_by_module: HashMap<ModuleIndex, Vec<usize>>,
}
impl<T> LinkableCollection<T> {
pub fn new() -> Self {
Self {
items: Vec::new(),
item_addrs_by_module: HashMap::new(),
}
}
pub fn resolve(&self, address: LinkableAddress<T>) -> Option<GlobalAddress<T>> {
let raw_address = self.item_addrs_by_module.get(&address.0)?.get(address.1)?;
Some(GlobalAddress(*raw_address, std::marker::PhantomData))
}
|
pub fn link(&mut self, source: GlobalAddress<T>, dist: ModuleIndex) -> LinkableAddress<T> {
let index = self
.item_addrs_by_module
.get(&dist)
.map(|c| c.len())
.unwrap_or(0);
self.item_addrs_by_module
.entry(dist)
.or_insert(Vec::new())
.push(source.0);
LinkableAddress::new_unsafe(dist, index)
}
pub fn get_global(&self, address: GlobalAddress<T>) -> &T {
// Never panic because GlobalAddress is always valid
self.items.get(address.0).unwrap()
}
pub fn get(&self, address: LinkableAddress<T>) -> Option<(&T, GlobalAddress<T>)> {
let addr = self.resolve(address)?;
Some((self.items.get(addr.0)?, addr))
}
pub fn push_global(&mut self, item: T) -> GlobalAddress<T> {
let index = self.items.len();
self.items.push(item);
GlobalAddress(index, std::marker::PhantomData)
}
pub fn push(&mut self, module_index: ModuleIndex, item: T) -> LinkableAddress<T> {
let globa_index = self.items.len();
self.items.push(item);
let addrs = self
.item_addrs_by_module
.entry(module_index)
.or_insert(Vec::new());
let index = addrs.len();
addrs.push(globa_index);
LinkableAddress::new_unsafe(module_index, index)
}
pub fn remove_module(&mut self, index: &ModuleIndex) {
// TODO: GC unlinked items
self.item_addrs_by_module.remove(index);
}
pub fn items(&self, module_index: ModuleIndex) -> Option<Vec<GlobalAddress<T>>> {
let item_addrs = self.item_addrs_by_module.get(&module_index)?;
Some(
item_addrs
.iter()
.map(|index| GlobalAddress(*index, std::marker::PhantomData))
.collect(),
)
}
pub fn is_empty(&self, module_index: ModuleIndex) -> bool {
self.item_addrs_by_module
.get(&module_index)
.map(|v| v.is_empty())
.unwrap_or(true)
}
}
| |
get_browse_nodes_response.py
|
# coding: utf-8
"""
Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
"""
"""
ProductAdvertisingAPI
https://webservices.amazon.com/paapi5/documentation/index.html # noqa: E501
"""
import pprint
import re # noqa: F401
import six
from paapi5_python_sdk.browse_nodes_result import BrowseNodesResult # noqa: F401,E501
from paapi5_python_sdk.error_data import ErrorData # noqa: F401,E501
class GetBrowseNodesResponse(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'browse_nodes_result': 'BrowseNodesResult',
'errors': 'list[ErrorData]'
}
attribute_map = {
'browse_nodes_result': 'BrowseNodesResult',
'errors': 'Errors'
}
def __init__(self, browse_nodes_result=None, errors=None): # noqa: E501
"""GetBrowseNodesResponse - a model defined in Swagger""" # noqa: E501
self._browse_nodes_result = None
self._errors = None
self.discriminator = None
if browse_nodes_result is not None:
self.browse_nodes_result = browse_nodes_result
if errors is not None:
|
@property
def browse_nodes_result(self):
"""Gets the browse_nodes_result of this GetBrowseNodesResponse. # noqa: E501
:return: The browse_nodes_result of this GetBrowseNodesResponse. # noqa: E501
:rtype: BrowseNodesResult
"""
return self._browse_nodes_result
@browse_nodes_result.setter
def browse_nodes_result(self, browse_nodes_result):
"""Sets the browse_nodes_result of this GetBrowseNodesResponse.
:param browse_nodes_result: The browse_nodes_result of this GetBrowseNodesResponse. # noqa: E501
:type: BrowseNodesResult
"""
self._browse_nodes_result = browse_nodes_result
@property
def errors(self):
"""Gets the errors of this GetBrowseNodesResponse. # noqa: E501
:return: The errors of this GetBrowseNodesResponse. # noqa: E501
:rtype: list[ErrorData]
"""
return self._errors
@errors.setter
def errors(self, errors):
"""Sets the errors of this GetBrowseNodesResponse.
:param errors: The errors of this GetBrowseNodesResponse. # noqa: E501
:type: list[ErrorData]
"""
self._errors = errors
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(GetBrowseNodesResponse, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, GetBrowseNodesResponse):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
|
self.errors = errors
|
booksManagment.js
|
const Book = require('../models/Book');
const Order = require('../models/Order');
const errors = require('restify-errors');
const strings = require('../strings');
exports.decrementBooksByOrderId = async (req,res,next,id) => {
try{
let confirmed = false, error = false, erorrMessage = strings.NO_COUNT_OF_BOOKS, decremented = false;
const order = await Order.findOne({
_id: id
});
console.log('received order',order.orderedBooks)
|
const book = await Book.findById(orderBooks[i].bookId);
const countInLibrary = book.count;
const countInOrder = orderBooks[i].count;
if(countInOrder>countInLibrary){
error= true;
erorrMessage+=` [Name]: ${book.name} [Author]: ${book.author} [Count in library] ${book.count} ,`;
}
}
if(!error){
confirmed = true;
for (let i = 0; i < orderBooks.length; i++) {
const book = await Book.findById(orderBooks[i].bookId);
const countInLibrary = book.count;
const countInOrder = orderBooks[i].count;
const updated = await Book.findOneAndUpdate({_id:book._id},{
count: countInLibrary - countInOrder
});
}
decremented = true;
}
}
return { confirmed, erorrMessage, decremented };
}catch(e){console.error(e);}
};
|
let orderBooks = order.orderedBooks;
if(orderBooks.length){
for (let i = 0; i < orderBooks.length; i++) {
|
testing-example-helpers.ts
|
import Page from '@atlaskit/webdriver-runner/wd-wrapper';
import { getExampleUrl } from '@atlaskit/visual-regression/helper';
import { Props } from '../../ui/Renderer';
import { selectors } from './page-objects/_renderer';
// AFP-2532 TODO: Fix automatic suppressions below
// eslint-disable-next-line @atlassian/tangerine/import/entry-points
import { ThemeModes } from '@atlaskit/theme';
import { GasPurePayload } from '@atlaskit/analytics-gas-types';
export type RendererPropsOverrides = { [T in keyof Props]?: Props[T] } & {
showSidebar?: boolean;
withRendererActions?: boolean;
mockInlineComments?: boolean;
themeMode?: ThemeModes;
};
export async function mountRenderer(
page: Page,
props?: RendererPropsOverrides,
adf?: Object,
) {
await page.waitForSelector('#renderer-container');
await page.executeAsync(
(props, adf, done: () => void) => {
function waitAndCall() {
let win = window as any;
if (win.__mountRenderer) {
win.__mountRenderer(props, adf);
done();
} else {
// There is no need to implement own timeout, if done() is not called on time,
// webdriver will throw with own timeout.
setTimeout(waitAndCall, 20);
}
}
waitAndCall();
},
props,
adf,
);
await page.waitForSelector(selectors.container, { timeout: 500 });
}
export async function getEvents(page: Page): Promise<GasPurePayload[]> {
return page.execute(() => (window as any).__analytics.events);
}
export async function goToRendererTestingExample(
client: ConstructorParameters<typeof Page>[0],
) {
const page = new Page(client);
const currentUrl = await page.url();
const url = getExampleUrl(
'editor',
'renderer',
'testing',
// @ts-ignore
global.__BASEURL__,
);
if (currentUrl !== url) {
await page.goto(url);
}
await page.maximizeWindow();
|
return page;
}
|
|
test_agent_interface.py
|
import pytest
import torch
from gym.spaces import Discrete, MultiDiscrete, MultiBinary, Dict, Tuple, Box
from blobrl.agents import AgentInterface
class MOCKAgentInterface(AgentInterface):
def __init__(self, observation_space, action_space, device):
super().__init__(observation_space, action_space, device)
def get_action(self, observation):
pass
def enable_exploration(self):
pass
def
|
(self):
pass
def learn(self, observation, action, reward, next_observation, done) -> None:
pass
def episode_finished(self) -> None:
pass
def save(self, file_name, dire_name="."):
pass
@classmethod
def load(cls, file_name, dire_name=".", device=None):
pass
def __str__(self):
return ""
class TestAgentInterface:
__test__ = True
agent = MOCKAgentInterface
list_work = [
[Discrete(3), Discrete(1)],
[Discrete(3), Discrete(3)],
[Discrete(10), Discrete(50)],
[MultiDiscrete([3]), MultiDiscrete([1])],
[MultiDiscrete([3, 3]), MultiDiscrete([3, 3])],
[MultiDiscrete([4, 4, 4]), MultiDiscrete([50, 4, 4])],
[MultiDiscrete([[100, 3], [3, 5]]), MultiDiscrete([[100, 3], [3, 5]])],
[MultiDiscrete([[[100, 3], [3, 5]], [[100, 3], [3, 5]]]),
MultiDiscrete([[[100, 3], [3, 5]], [[100, 3], [3, 5]]])],
[MultiBinary(1), MultiBinary(1)],
[MultiBinary(3), MultiBinary(3)],
# [MultiBinary([3, 2]), MultiBinary([3, 2])], # Don't work yet because gym don't implemented this
[Box(low=0, high=10, shape=[1]), Box(low=0, high=10, shape=[1])],
[Box(low=0, high=10, shape=[2, 2]), Box(low=0, high=10, shape=[2, 2])],
[Box(low=0, high=10, shape=[2, 2, 2]), Box(low=0, high=10, shape=[2, 2, 2])],
[Tuple([Discrete(1), MultiDiscrete([1, 1])]), Tuple([Discrete(1), MultiDiscrete([1, 1])])],
[Dict({"first": Discrete(1), "second": MultiDiscrete([1, 1])}),
Dict({"first": Discrete(1), "second": MultiDiscrete([1, 1])})],
]
list_fail = [
[None, None],
["dedrfe", "qdzq"],
[1215.4154, 157.48],
["zdzd", (Discrete(1))],
[Discrete(1), "zdzd"],
["zdzd", (1, 4, 7)],
[(1, 4, 7), "zdzd"],
[152, 485]
]
def test_init(self):
for o, a in self.list_work:
with pytest.raises(TypeError):
self.agent(o, a, "cpu")
for o, a in self.list_fail:
with pytest.raises(TypeError):
self.agent(o, a, "cpu")
def test_device(self):
for o, a in self.list_work:
device = torch.device("cpu")
assert device == self.agent(o, a, device).device
device = None
assert torch.device("cpu") == self.agent(o, a, device).device
for device in ["dzeqdzqd", 1512, object(), 151.515]:
with pytest.raises(TypeError):
self.agent(o, a, device)
if torch.cuda.is_available():
self.agent(o, a, torch.device("cuda"))
def test__str__(self):
pass
|
disable_exploration
|
zip.go
|
// Copyright 2015 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package zip
import (
"bytes"
"compress/flate"
"errors"
"fmt"
"hash/crc32"
"io"
"io/ioutil"
"os"
"path/filepath"
"sort"
"strings"
"sync"
"syscall"
"time"
"unicode"
"github.com/google/blueprint/pathtools"
"android/soong/jar"
"android/soong/third_party/zip"
)
// Block size used during parallel compression of a single file.
const parallelBlockSize = 1 * 1024 * 1024 // 1MB
// Minimum file size to use parallel compression. It requires more
// flate.Writer allocations, since we can't change the dictionary
// during Reset
const minParallelFileSize = parallelBlockSize * 6
// Size of the ZIP compression window (32KB)
const windowSize = 32 * 1024
type nopCloser struct {
io.Writer
}
func (nopCloser) Close() error {
return nil
}
type byteReaderCloser struct {
*bytes.Reader
io.Closer
}
type pathMapping struct {
dest, src string
zipMethod uint16
}
type FileArg struct {
PathPrefixInZip, SourcePrefixToStrip string
SourceFiles []string
JunkPaths bool
GlobDir string
}
type FileArgsBuilder struct {
state FileArg
err error
fs pathtools.FileSystem
fileArgs []FileArg
}
func NewFileArgsBuilder() *FileArgsBuilder {
return &FileArgsBuilder{
fs: pathtools.OsFs,
}
}
func (b *FileArgsBuilder) JunkPaths(v bool) *FileArgsBuilder {
b.state.JunkPaths = v
b.state.SourcePrefixToStrip = ""
return b
}
func (b *FileArgsBuilder) SourcePrefixToStrip(prefixToStrip string) *FileArgsBuilder {
b.state.JunkPaths = false
b.state.SourcePrefixToStrip = prefixToStrip
return b
}
func (b *FileArgsBuilder) PathPrefixInZip(rootPrefix string) *FileArgsBuilder {
b.state.PathPrefixInZip = rootPrefix
return b
}
func (b *FileArgsBuilder) File(name string) *FileArgsBuilder {
if b.err != nil {
return b
}
arg := b.state
arg.SourceFiles = []string{name}
b.fileArgs = append(b.fileArgs, arg)
return b
}
func (b *FileArgsBuilder) Dir(name string) *FileArgsBuilder {
if b.err != nil {
return b
}
arg := b.state
arg.GlobDir = name
b.fileArgs = append(b.fileArgs, arg)
return b
}
// List reads the file names from the given file and adds them to the source files list.
func (b *FileArgsBuilder) List(name string) *FileArgsBuilder {
if b.err != nil {
return b
}
f, err := b.fs.Open(name)
if err != nil {
b.err = err
return b
}
defer f.Close()
list, err := ioutil.ReadAll(f)
if err != nil {
b.err = err
return b
}
arg := b.state
arg.SourceFiles = strings.Fields(string(list))
b.fileArgs = append(b.fileArgs, arg)
return b
}
// RspFile reads the file names from given .rsp file and adds them to the source files list.
func (b *FileArgsBuilder) RspFile(name string) *FileArgsBuilder {
if b.err != nil {
return b
}
f, err := b.fs.Open(name)
if err != nil {
b.err = err
return b
}
defer f.Close()
list, err := ioutil.ReadAll(f)
if err != nil {
b.err = err
return b
}
arg := b.state
arg.SourceFiles = ReadRespFile(list)
for i := range arg.SourceFiles {
arg.SourceFiles[i] = pathtools.MatchEscape(arg.SourceFiles[i])
}
b.fileArgs = append(b.fileArgs, arg)
return b
}
func (b *FileArgsBuilder) Error() error {
if b == nil {
return nil
}
return b.err
}
func (b *FileArgsBuilder) FileArgs() []FileArg {
if b == nil {
return nil
}
return b.fileArgs
}
type IncorrectRelativeRootError struct {
RelativeRoot string
Path string
}
func (x IncorrectRelativeRootError) Error() string {
return fmt.Sprintf("path %q is outside relative root %q", x.Path, x.RelativeRoot)
}
type ZipWriter struct {
time time.Time
createdFiles map[string]string
createdDirs map[string]string
directories bool
errors chan error
writeOps chan chan *zipEntry
cpuRateLimiter *CPURateLimiter
memoryRateLimiter *MemoryRateLimiter
compressorPool sync.Pool
compLevel int
followSymlinks pathtools.ShouldFollowSymlinks
ignoreMissingFiles bool
stderr io.Writer
fs pathtools.FileSystem
}
type zipEntry struct {
fh *zip.FileHeader
// List of delayed io.Reader
futureReaders chan chan io.Reader
// Only used for passing into the MemoryRateLimiter to ensure we
// release as much memory as much as we request
allocatedSize int64
}
type ZipArgs struct {
FileArgs []FileArg
OutputFilePath string
EmulateJar bool
SrcJar bool
AddDirectoryEntriesToZip bool
CompressionLevel int
ManifestSourcePath string
NumParallelJobs int
NonDeflatedFiles map[string]bool
WriteIfChanged bool
StoreSymlinks bool
IgnoreMissingFiles bool
Stderr io.Writer
Filesystem pathtools.FileSystem
}
const NOQUOTE = '\x00'
func ReadRespFile(bytes []byte) []string {
var args []string
var arg []rune
isEscaping := false
quotingStart := NOQUOTE
for _, c := range string(bytes) {
switch {
case isEscaping:
if quotingStart == '"' {
if !(c == '"' || c == '\\') {
// '\"' or '\\' will be escaped under double quoting.
arg = append(arg, '\\')
}
}
arg = append(arg, c)
isEscaping = false
case c == '\\' && quotingStart != '\'':
isEscaping = true
case quotingStart == NOQUOTE && (c == '\'' || c == '"'):
quotingStart = c
case quotingStart != NOQUOTE && c == quotingStart:
quotingStart = NOQUOTE
case quotingStart == NOQUOTE && unicode.IsSpace(c):
// Current character is a space outside quotes
if len(arg) != 0 {
args = append(args, string(arg))
}
arg = arg[:0]
default:
arg = append(arg, c)
}
}
if len(arg) != 0 {
args = append(args, string(arg))
}
return args
}
func zipTo(args ZipArgs, w io.Writer) error {
if args.EmulateJar {
args.AddDirectoryEntriesToZip = true
}
// Have Glob follow symlinks if they are not being stored as symlinks in the zip file.
followSymlinks := pathtools.ShouldFollowSymlinks(!args.StoreSymlinks)
z := &ZipWriter{
time: jar.DefaultTime,
createdDirs: make(map[string]string),
createdFiles: make(map[string]string),
directories: args.AddDirectoryEntriesToZip,
compLevel: args.CompressionLevel,
followSymlinks: followSymlinks,
ignoreMissingFiles: args.IgnoreMissingFiles,
stderr: args.Stderr,
fs: args.Filesystem,
}
if z.fs == nil {
z.fs = pathtools.OsFs
}
if z.stderr == nil {
z.stderr = os.Stderr
}
pathMappings := []pathMapping{}
noCompression := args.CompressionLevel == 0
for _, fa := range args.FileArgs {
var srcs []string
for _, s := range fa.SourceFiles {
s = strings.TrimSpace(s)
if s == "" {
continue
}
globbed, _, err := z.fs.Glob(s, nil, followSymlinks)
if err != nil {
return err
}
if len(globbed) == 0 {
err := &os.PathError{
Op: "lstat",
Path: s,
Err: os.ErrNotExist,
}
if args.IgnoreMissingFiles {
fmt.Fprintln(z.stderr, "warning:", err)
} else {
return err
}
}
srcs = append(srcs, globbed...)
}
if fa.GlobDir != "" {
if exists, isDir, err := z.fs.Exists(fa.GlobDir); err != nil {
return err
} else if !exists && !args.IgnoreMissingFiles {
err := &os.PathError{
Op: "lstat",
Path: fa.GlobDir,
Err: os.ErrNotExist,
}
if args.IgnoreMissingFiles {
fmt.Fprintln(z.stderr, "warning:", err)
} else {
return err
}
} else if !isDir && !args.IgnoreMissingFiles {
err := &os.PathError{
Op: "lstat",
Path: fa.GlobDir,
Err: syscall.ENOTDIR,
}
if args.IgnoreMissingFiles {
fmt.Fprintln(z.stderr, "warning:", err)
} else {
return err
}
}
globbed, _, err := z.fs.Glob(filepath.Join(fa.GlobDir, "**/*"), nil, followSymlinks)
if err != nil {
return err
}
srcs = append(srcs, globbed...)
}
for _, src := range srcs {
err := fillPathPairs(fa, src, &pathMappings, args.NonDeflatedFiles, noCompression)
if err != nil {
return err
}
}
}
return z.write(w, pathMappings, args.ManifestSourcePath, args.EmulateJar, args.SrcJar, args.NumParallelJobs)
}
// Zip creates an output zip archive from given sources.
func Zip(args ZipArgs) error {
if args.OutputFilePath == "" {
return fmt.Errorf("output file path must be nonempty")
}
buf := &bytes.Buffer{}
var out io.Writer = buf
if !args.WriteIfChanged {
f, err := os.Create(args.OutputFilePath)
if err != nil {
return err
}
defer f.Close()
defer func() {
if err != nil {
os.Remove(args.OutputFilePath)
}
}()
out = f
}
err := zipTo(args, out)
if err != nil {
return err
}
if args.WriteIfChanged {
err := pathtools.WriteFileIfChanged(args.OutputFilePath, buf.Bytes(), 0666)
if err != nil {
return err
}
}
return nil
}
func fillPathPairs(fa FileArg, src string, pathMappings *[]pathMapping,
nonDeflatedFiles map[string]bool, noCompression bool) error
|
func jarSort(mappings []pathMapping) {
sort.SliceStable(mappings, func(i int, j int) bool {
return jar.EntryNamesLess(mappings[i].dest, mappings[j].dest)
})
}
func (z *ZipWriter) write(f io.Writer, pathMappings []pathMapping, manifest string, emulateJar, srcJar bool,
parallelJobs int) error {
z.errors = make(chan error)
defer close(z.errors)
// This channel size can be essentially unlimited -- it's used as a fifo
// queue decouple the CPU and IO loads. Directories don't require any
// compression time, but still cost some IO. Similar with small files that
// can be very fast to compress. Some files that are more difficult to
// compress won't take a corresponding longer time writing out.
//
// The optimum size here depends on your CPU and IO characteristics, and
// the the layout of your zip file. 1000 was chosen mostly at random as
// something that worked reasonably well for a test file.
//
// The RateLimit object will put the upper bounds on the number of
// parallel compressions and outstanding buffers.
z.writeOps = make(chan chan *zipEntry, 1000)
z.cpuRateLimiter = NewCPURateLimiter(int64(parallelJobs))
z.memoryRateLimiter = NewMemoryRateLimiter(0)
defer func() {
z.cpuRateLimiter.Stop()
z.memoryRateLimiter.Stop()
}()
if manifest != "" && !emulateJar {
return errors.New("must specify --jar when specifying a manifest via -m")
}
if emulateJar {
// manifest may be empty, in which case addManifest will fill in a default
pathMappings = append(pathMappings, pathMapping{jar.ManifestFile, manifest, zip.Deflate})
jarSort(pathMappings)
}
go func() {
var err error
defer close(z.writeOps)
for _, ele := range pathMappings {
if emulateJar && ele.dest == jar.ManifestFile {
err = z.addManifest(ele.dest, ele.src, ele.zipMethod)
} else {
err = z.addFile(ele.dest, ele.src, ele.zipMethod, emulateJar, srcJar)
}
if err != nil {
z.errors <- err
return
}
}
}()
zipw := zip.NewWriter(f)
var currentWriteOpChan chan *zipEntry
var currentWriter io.WriteCloser
var currentReaders chan chan io.Reader
var currentReader chan io.Reader
var done bool
for !done {
var writeOpsChan chan chan *zipEntry
var writeOpChan chan *zipEntry
var readersChan chan chan io.Reader
if currentReader != nil {
// Only read and process errors
} else if currentReaders != nil {
readersChan = currentReaders
} else if currentWriteOpChan != nil {
writeOpChan = currentWriteOpChan
} else {
writeOpsChan = z.writeOps
}
select {
case writeOp, ok := <-writeOpsChan:
if !ok {
done = true
}
currentWriteOpChan = writeOp
case op := <-writeOpChan:
currentWriteOpChan = nil
var err error
if op.fh.Method == zip.Deflate {
currentWriter, err = zipw.CreateCompressedHeader(op.fh)
} else {
var zw io.Writer
op.fh.CompressedSize64 = op.fh.UncompressedSize64
zw, err = zipw.CreateHeaderAndroid(op.fh)
currentWriter = nopCloser{zw}
}
if err != nil {
return err
}
currentReaders = op.futureReaders
if op.futureReaders == nil {
currentWriter.Close()
currentWriter = nil
}
z.memoryRateLimiter.Finish(op.allocatedSize)
case futureReader, ok := <-readersChan:
if !ok {
// Done with reading
currentWriter.Close()
currentWriter = nil
currentReaders = nil
}
currentReader = futureReader
case reader := <-currentReader:
_, err := io.Copy(currentWriter, reader)
if err != nil {
return err
}
currentReader = nil
case err := <-z.errors:
return err
}
}
// One last chance to catch an error
select {
case err := <-z.errors:
return err
default:
zipw.Close()
return nil
}
}
// imports (possibly with compression) <src> into the zip at sub-path <dest>
func (z *ZipWriter) addFile(dest, src string, method uint16, emulateJar, srcJar bool) error {
var fileSize int64
var executable bool
var s os.FileInfo
var err error
if z.followSymlinks {
s, err = z.fs.Stat(src)
} else {
s, err = z.fs.Lstat(src)
}
if err != nil {
if os.IsNotExist(err) && z.ignoreMissingFiles {
fmt.Fprintln(z.stderr, "warning:", err)
return nil
}
return err
}
createParentDirs := func(dest, src string) error {
if err := z.writeDirectory(filepath.Dir(dest), src, emulateJar); err != nil {
return err
}
if prev, exists := z.createdDirs[dest]; exists {
return fmt.Errorf("destination %q is both a directory %q and a file %q", dest, prev, src)
}
if prev, exists := z.createdFiles[dest]; exists {
return fmt.Errorf("destination %q has two files %q and %q", dest, prev, src)
}
z.createdFiles[dest] = src
return nil
}
if s.IsDir() {
if z.directories {
return z.writeDirectory(dest, src, emulateJar)
}
return nil
} else if s.Mode()&os.ModeSymlink != 0 {
err = createParentDirs(dest, src)
if err != nil {
return err
}
return z.writeSymlink(dest, src)
} else if s.Mode().IsRegular() {
r, err := z.fs.Open(src)
if err != nil {
return err
}
if srcJar && filepath.Ext(src) == ".java" {
// rewrite the destination using the package path if it can be determined
pkg, err := jar.JavaPackage(r, src)
if err != nil {
// ignore errors for now, leaving the file at in its original location in the zip
} else {
dest = filepath.Join(filepath.Join(strings.Split(pkg, ".")...), filepath.Base(src))
}
_, err = r.Seek(0, io.SeekStart)
if err != nil {
return err
}
}
fileSize = s.Size()
executable = s.Mode()&0100 != 0
header := &zip.FileHeader{
Name: dest,
Method: method,
UncompressedSize64: uint64(fileSize),
}
if executable {
header.SetMode(0700)
}
err = createParentDirs(dest, src)
if err != nil {
return err
}
return z.writeFileContents(header, r)
} else {
return fmt.Errorf("%s is not a file, directory, or symlink", src)
}
}
func (z *ZipWriter) addManifest(dest string, src string, _ uint16) error {
if prev, exists := z.createdDirs[dest]; exists {
return fmt.Errorf("destination %q is both a directory %q and a file %q", dest, prev, src)
}
if prev, exists := z.createdFiles[dest]; exists {
return fmt.Errorf("destination %q has two files %q and %q", dest, prev, src)
}
if err := z.writeDirectory(filepath.Dir(dest), src, true); err != nil {
return err
}
var contents []byte
if src != "" {
f, err := z.fs.Open(src)
if err != nil {
return err
}
contents, err = ioutil.ReadAll(f)
f.Close()
if err != nil {
return err
}
}
fh, buf, err := jar.ManifestFileContents(contents)
if err != nil {
return err
}
reader := &byteReaderCloser{bytes.NewReader(buf), ioutil.NopCloser(nil)}
return z.writeFileContents(fh, reader)
}
func (z *ZipWriter) writeFileContents(header *zip.FileHeader, r pathtools.ReaderAtSeekerCloser) (err error) {
header.SetModTime(z.time)
compressChan := make(chan *zipEntry, 1)
z.writeOps <- compressChan
// Pre-fill a zipEntry, it will be sent in the compressChan once
// we're sure about the Method and CRC.
ze := &zipEntry{
fh: header,
}
ze.allocatedSize = int64(header.UncompressedSize64)
z.cpuRateLimiter.Request()
z.memoryRateLimiter.Request(ze.allocatedSize)
fileSize := int64(header.UncompressedSize64)
if fileSize == 0 {
fileSize = int64(header.UncompressedSize)
}
if header.Method == zip.Deflate && fileSize >= minParallelFileSize {
wg := new(sync.WaitGroup)
// Allocate enough buffer to hold all readers. We'll limit
// this based on actual buffer sizes in RateLimit.
ze.futureReaders = make(chan chan io.Reader, (fileSize/parallelBlockSize)+1)
// Calculate the CRC in the background, since reading the entire
// file could take a while.
//
// We could split this up into chunks as well, but it's faster
// than the compression. Due to the Go Zip API, we also need to
// know the result before we can begin writing the compressed
// data out to the zipfile.
wg.Add(1)
go z.crcFile(r, ze, compressChan, wg)
for start := int64(0); start < fileSize; start += parallelBlockSize {
sr := io.NewSectionReader(r, start, parallelBlockSize)
resultChan := make(chan io.Reader, 1)
ze.futureReaders <- resultChan
z.cpuRateLimiter.Request()
last := !(start+parallelBlockSize < fileSize)
var dict []byte
if start >= windowSize {
dict, err = ioutil.ReadAll(io.NewSectionReader(r, start-windowSize, windowSize))
if err != nil {
return err
}
}
wg.Add(1)
go z.compressPartialFile(sr, dict, last, resultChan, wg)
}
close(ze.futureReaders)
// Close the file handle after all readers are done
go func(wg *sync.WaitGroup, closer io.Closer) {
wg.Wait()
closer.Close()
}(wg, r)
} else {
go func() {
z.compressWholeFile(ze, r, compressChan)
r.Close()
}()
}
return nil
}
func (z *ZipWriter) crcFile(r io.Reader, ze *zipEntry, resultChan chan *zipEntry, wg *sync.WaitGroup) {
defer wg.Done()
defer z.cpuRateLimiter.Finish()
crc := crc32.NewIEEE()
_, err := io.Copy(crc, r)
if err != nil {
z.errors <- err
return
}
ze.fh.CRC32 = crc.Sum32()
resultChan <- ze
close(resultChan)
}
func (z *ZipWriter) compressPartialFile(r io.Reader, dict []byte, last bool, resultChan chan io.Reader, wg *sync.WaitGroup) {
defer wg.Done()
result, err := z.compressBlock(r, dict, last)
if err != nil {
z.errors <- err
return
}
z.cpuRateLimiter.Finish()
resultChan <- result
}
func (z *ZipWriter) compressBlock(r io.Reader, dict []byte, last bool) (*bytes.Buffer, error) {
buf := new(bytes.Buffer)
var fw *flate.Writer
var err error
if len(dict) > 0 {
// There's no way to Reset a Writer with a new dictionary, so
// don't use the Pool
fw, err = flate.NewWriterDict(buf, z.compLevel, dict)
} else {
var ok bool
if fw, ok = z.compressorPool.Get().(*flate.Writer); ok {
fw.Reset(buf)
} else {
fw, err = flate.NewWriter(buf, z.compLevel)
}
defer z.compressorPool.Put(fw)
}
if err != nil {
return nil, err
}
_, err = io.Copy(fw, r)
if err != nil {
return nil, err
}
if last {
fw.Close()
} else {
fw.Flush()
}
return buf, nil
}
func (z *ZipWriter) compressWholeFile(ze *zipEntry, r io.ReadSeeker, compressChan chan *zipEntry) {
crc := crc32.NewIEEE()
_, err := io.Copy(crc, r)
if err != nil {
z.errors <- err
return
}
ze.fh.CRC32 = crc.Sum32()
_, err = r.Seek(0, 0)
if err != nil {
z.errors <- err
return
}
readFile := func(reader io.ReadSeeker) ([]byte, error) {
_, err := reader.Seek(0, 0)
if err != nil {
return nil, err
}
buf, err := ioutil.ReadAll(reader)
if err != nil {
return nil, err
}
return buf, nil
}
ze.futureReaders = make(chan chan io.Reader, 1)
futureReader := make(chan io.Reader, 1)
ze.futureReaders <- futureReader
close(ze.futureReaders)
if ze.fh.Method == zip.Deflate {
compressed, err := z.compressBlock(r, nil, true)
if err != nil {
z.errors <- err
return
}
if uint64(compressed.Len()) < ze.fh.UncompressedSize64 {
futureReader <- compressed
} else {
buf, err := readFile(r)
if err != nil {
z.errors <- err
return
}
ze.fh.Method = zip.Store
futureReader <- bytes.NewReader(buf)
}
} else {
buf, err := readFile(r)
if err != nil {
z.errors <- err
return
}
ze.fh.Method = zip.Store
futureReader <- bytes.NewReader(buf)
}
z.cpuRateLimiter.Finish()
close(futureReader)
compressChan <- ze
close(compressChan)
}
// writeDirectory annotates that dir is a directory created for the src file or directory, and adds
// the directory entry to the zip file if directories are enabled.
func (z *ZipWriter) writeDirectory(dir string, src string, emulateJar bool) error {
// clean the input
dir = filepath.Clean(dir)
// discover any uncreated directories in the path
var zipDirs []string
for dir != "" && dir != "." {
if _, exists := z.createdDirs[dir]; exists {
break
}
if prev, exists := z.createdFiles[dir]; exists {
return fmt.Errorf("destination %q is both a directory %q and a file %q", dir, src, prev)
}
z.createdDirs[dir] = src
// parent directories precede their children
zipDirs = append([]string{dir}, zipDirs...)
dir = filepath.Dir(dir)
}
if z.directories {
// make a directory entry for each uncreated directory
for _, cleanDir := range zipDirs {
var dirHeader *zip.FileHeader
if emulateJar && cleanDir+"/" == jar.MetaDir {
dirHeader = jar.MetaDirFileHeader()
} else {
dirHeader = &zip.FileHeader{
Name: cleanDir + "/",
}
dirHeader.SetMode(0700 | os.ModeDir)
}
dirHeader.SetModTime(z.time)
ze := make(chan *zipEntry, 1)
ze <- &zipEntry{
fh: dirHeader,
}
close(ze)
z.writeOps <- ze
}
}
return nil
}
func (z *ZipWriter) writeSymlink(rel, file string) error {
fileHeader := &zip.FileHeader{
Name: rel,
}
fileHeader.SetModTime(z.time)
fileHeader.SetMode(0777 | os.ModeSymlink)
dest, err := z.fs.Readlink(file)
if err != nil {
return err
}
fileHeader.UncompressedSize64 = uint64(len(dest))
fileHeader.CRC32 = crc32.ChecksumIEEE([]byte(dest))
ze := make(chan *zipEntry, 1)
futureReaders := make(chan chan io.Reader, 1)
futureReader := make(chan io.Reader, 1)
futureReaders <- futureReader
close(futureReaders)
futureReader <- bytes.NewBufferString(dest)
close(futureReader)
ze <- &zipEntry{
fh: fileHeader,
futureReaders: futureReaders,
}
close(ze)
z.writeOps <- ze
return nil
}
|
{
var dest string
if fa.JunkPaths {
dest = filepath.Base(src)
} else {
var err error
dest, err = filepath.Rel(fa.SourcePrefixToStrip, src)
if err != nil {
return err
}
if strings.HasPrefix(dest, "../") {
return IncorrectRelativeRootError{
Path: src,
RelativeRoot: fa.SourcePrefixToStrip,
}
}
}
dest = filepath.Join(fa.PathPrefixInZip, dest)
zipMethod := zip.Deflate
if _, found := nonDeflatedFiles[dest]; found || noCompression {
zipMethod = zip.Store
}
*pathMappings = append(*pathMappings,
pathMapping{dest: dest, src: src, zipMethod: zipMethod})
return nil
}
|
marks.rs
|
//! See test_utils/src/marks.rs
test_utils::marks!(
bogus_paths
name_res_works_for_broken_modules
can_import_enum_variant
type_var_cycles_resolve_completely
type_var_cycles_resolve_as_possible
type_var_resolves_to_int_var
glob_enum
glob_across_crates
std_prelude
match_ergonomics_ref
infer_while_let
macro_rules_from_other_crates_are_visible_with_macro_use
prelude_is_macro_use
coerce_merge_fail_fallback
|
);
|
macro_dollar_crate_self
macro_dollar_crate_other
|
index.js
|
import feathers from '@feathersjs/feathers';
import socketio from '@feathersjs/socketio-client';
import io from 'socket.io-client';
import auth from '@feathersjs/authentication-client';
import constants from './utils/constants.js';
import Strats from './strats/index.js';
import Account from './account/index.js';
import User from './user/index.js';
import Services from './services/index.js';
import Utils from './utils/index.js';
const API = function API(accessToken)
{
this.socket = io(constants.backendUrl, {
transports: ['websocket'],
upgrade: false,
});
this.client = feathers()
.configure(socketio(this.socket, { timeout: 10000 }))
.configure(auth());
this.client.hooks({
error(context)
{
const { error } = context;
if (error.message === 'jwt expired')
{
this.authenticated = false;
}
throw Error(error.message);
// return context;
},
});
this.authenticated = false;
this.accessToken = accessToken;
this.setAuthenticated = (bool) => { this.authenticated = (bool === true); };
this.getAuthenticated = () => { return this.authenticated; };
this.setSocket = (socket) => { this.socket = socket; };
|
this.getSocket = () => { return this.socket; };
this.setClient = (client) => { this.client = client; };
this.getClient = () => { return this.client; };
this.closeAndExit = () =>
{
this.socket.disconnect();
process.exit(0);
};
this.strats = new Strats(this);
this.account = new Account(this);
this.user = new User(this);
this.services = new Services(this);
this.utils = new Utils(this);
};
export default API;
| |
rest_xml.rs
|
use std::io::Write;
use inflector::Inflector;
use ::Service;
use botocore::{Member, Operation, Shape, ShapeType};
use super::{xml_payload_parser, rest_response_parser, rest_request_generator, get_rust_type,
mutate_type_name};
use super::{GenerateProtocol, generate_field_name, error_type_name};
use super::{IoResult, FileWriter};
pub struct RestXmlGenerator;
impl GenerateProtocol for RestXmlGenerator {
fn generate_method_signatures(&self, writer: &mut FileWriter, service: &Service) -> IoResult {
for (operation_name, operation) in service.operations().iter() {
writeln!(writer,"
{documentation}
{method_signature};
",
documentation = generate_documentation(operation),
method_signature = generate_method_signature(operation_name, operation),
)?
}
Ok(())
}
fn generate_method_impls(&self, writer: &mut FileWriter, service: &Service) -> IoResult {
for (operation_name, operation) in service.operations().iter() {
let (request_uri, _) = rest_request_generator::parse_query_string(&operation.http
.request_uri);
writeln!(writer,
"{documentation}
#[allow(unused_variables, warnings)]
{method_signature} {{
{modify_uri}
let mut request = SignedRequest::new(\"{http_method}\", \"{endpoint_prefix}\", &self.region, &request_uri);
{set_headers}
{set_parameters}
{build_payload}
request.sign_with_plus(&try!(self.credentials_provider.credentials()), true);
let mut response = try!(self.dispatcher.dispatch(&request));
match response.status {{
StatusCode::Ok|StatusCode::NoContent|StatusCode::PartialContent => {{
{parse_response_body}
{parse_non_payload}
Ok(result)
}},
_ => {{
let mut body: Vec<u8> = Vec::new();
try!(response.body.read_to_end(&mut body));
Err({error_type}::from_body(String::from_utf8_lossy(&body).as_ref()))
}}
}}
}}
",
documentation = generate_documentation(operation),
http_method = &operation.http.method,
endpoint_prefix = service.endpoint_prefix(),
method_signature = generate_method_signature(operation_name, operation),
error_type = error_type_name(operation_name),
build_payload = generate_payload_serialization(service, operation)
.unwrap_or_else(|| "".to_string()),
modify_uri = rest_request_generator::generate_uri_formatter(&request_uri,
service,
operation)
.unwrap_or_else(|| "".to_string()),
set_headers = rest_request_generator::generate_headers(service, operation)
.unwrap_or_else(|| "".to_string()),
set_parameters =
rest_request_generator::generate_params_loading_string(service,
operation)
.unwrap_or_else(|| "".to_string()),
parse_non_payload =
rest_response_parser::generate_response_headers_parser(service,
operation)
.unwrap_or_else(|| "".to_owned()),
parse_response_body =
xml_payload_parser::generate_response_parser(service, operation, true))?;
}
Ok(())
}
fn generate_prelude(&self, writer: &mut FileWriter, service: &Service) -> IoResult {
let mut imports = "
use std::str::{FromStr};
use std::io::Write;
use xml::reader::ParserConfig;
use rusoto_core::param::{Params, ServiceParams};
use rusoto_core::signature::SignedRequest;
use xml;
use xml::EventReader;
use xml::EventWriter;
use xml::reader::XmlEvent;
use rusoto_core::xmlerror::*;
use rusoto_core::xmlutil::{Next, Peek, XmlParseError, XmlResponse};
use rusoto_core::xmlutil::{peek_at_name, characters, end_element, start_element, skip_tree};
enum DeserializerNext {
Close,
Skip,
Element(String),
}"
.to_owned();
if service.service_type_name() == "S3" {
imports += "
use md5;
use base64;";
}
writeln!(writer, "{}", imports)
}
fn generate_struct_attributes(&self,
_serialized: bool,
_deserialized: bool)
-> String {
let derived = vec!["Default", "Debug"];
format!("#[derive({})]", derived.join(","))
}
fn generate_serializer(&self, name: &str, shape: &Shape, service: &Service) -> Option<String> {
if name != "RestoreRequest" && name.ends_with("Request") {
if used_as_request_payload(shape) {
return Some(generate_request_payload_serializer(name, shape));
} else {
return None;
}
}
let ty = get_rust_type(service, name, shape, false, self.timestamp_type());
Some(format!("
pub struct {name}Serializer;
impl {name}Serializer {{
{serializer_signature} {{
{serializer_body}
}}
}}
",
name = name,
serializer_body = generate_serializer_body(shape, service),
serializer_signature = generate_serializer_signature(&ty),
))
}
fn generate_deserializer(&self,
name: &str,
shape: &Shape,
service: &Service)
-> Option<String> {
let ty = get_rust_type(service, name, shape, false, self.timestamp_type());
Some(xml_payload_parser::generate_deserializer(name, &ty, shape, service))
}
fn timestamp_type(&self) -> &'static str {
"String"
}
}
fn generate_documentation(operation: &Operation) -> String {
match operation.documentation {
Some(ref docs) => {
format!("#[doc=\"{}\"]",
docs.replace("\\", "\\\\").replace("\"", "\\\""))
}
None => "".to_owned(),
}
}
fn generate_payload_serialization(service: &Service, operation: &Operation) -> Option<String> {
// nothing to do if there's no input type
if operation.input.is_none() {
return None;
}
let input = operation.input.as_ref().unwrap();
let input_shape = service.get_shape(&input.shape).unwrap();
let mut parts: Vec<String> = Vec::new();
// the payload field determines which member of the input shape is sent as the request body (if any)
if input_shape.payload.is_some() {
parts.push("let mut payload: Vec<u8>;".to_owned());
parts.push(generate_payload_member_serialization(input_shape));
parts.push(generate_service_specific_code(service, operation)
.unwrap_or_else(|| "".to_owned()));
parts.push("request.set_payload(Some(payload));".to_owned());
} else if used_as_request_payload(input_shape) {
// In Route 53, no operation has "payload" parameter but some API actually requires
// payload. In that case, the payload should include members whose "location" parameter is
// missing.
let xmlns = input.xml_namespace.as_ref().unwrap();
parts.push("let mut writer = EventWriter::new(Vec::new());".to_owned());
parts.push(format!("{name}Serializer::serialize(&mut writer, \"{name}\", &input, \"{xmlns}\");", name = input.shape, xmlns = xmlns.uri));
parts.push(generate_service_specific_code(service, operation).unwrap_or_else(|| "".to_owned()));
parts.push("request.set_payload(Some(writer.into_inner()));".to_owned());
}
Some(parts.join("\n"))
}
fn generate_service_specific_code(service: &Service, operation: &Operation) -> Option<String> {
// S3 needs some special handholding. Others may later.
// See `handlers.py` in botocore for more details
match service.service_type_name() {
"S3" => {
match &operation.name[..] {
"PutBucketTagging" |
"PutBucketLifecycle" |
"PutBucketLifecycleConfiguration" |
"PutBucketCors" |
"DeleteObjects" |
"PutBucketReplication" => {
Some("let digest = md5::compute(&payload);
// need to deref digest and then pass that reference:
request.add_header(\"Content-MD5\", &base64::encode(&(*digest)));"
.to_owned())
}
_ => None,
}
}
_ => None,
}
}
fn generate_payload_member_serialization(shape: &Shape) -> String {
let payload_field = shape.payload.as_ref().unwrap();
let payload_member = shape.members.as_ref().unwrap().get(payload_field).unwrap();
// if the member is 'streaming', it's a Vec<u8> that should just be delivered as the body
if payload_member.streaming() {
format!("payload = input.{}.clone().unwrap();",
payload_field.to_snake_case())
}
// otherwise serialize the object to XML and use that as the payload
else if shape.required(payload_field) {
// some payload types are not required members of their shape
format!("let mut writer = EventWriter::new(Vec::new()); {xml_type}Serializer::serialize(&mut writer, \"{xml_type}\", &input.{payload_field}); payload = writer.into_inner();",
payload_field = payload_field.to_snake_case(),
xml_type = payload_member.shape)
} else {
format!("if input.{payload_field}.is_some() {{
let mut writer = EventWriter::new(Vec::new());
{xml_type}Serializer::serialize(&mut writer, \"{location_name}\", input.{payload_field}.as_ref().unwrap());
payload = writer.into_inner();
}} else {{
payload = Vec::new();
}}",
payload_field = payload_field.to_snake_case(),
xml_type = payload_member.shape,
location_name = payload_member.location_name.as_ref().unwrap())
}
}
fn generate_method_signature(operation_name: &str, operation: &Operation) -> String {
if operation.input.is_some() {
format!(
"fn {operation_name}(&self, input: &{input_type}) -> Result<{output_type}, {error_type}>",
input_type = operation.input.as_ref().unwrap().shape,
operation_name = operation_name.to_snake_case(),
output_type = &operation.output_shape_or("()"),
error_type = error_type_name(operation_name),
)
} else {
format!(
"fn {operation_name}(&self) -> Result<{output_type}, {error_type}>",
operation_name = operation_name.to_snake_case(),
error_type = error_type_name(operation_name),
output_type = &operation.output_shape_or("()"),
)
}
}
fn generate_serializer_body(shape: &Shape, service: &Service) -> String {
match shape.shape_type {
ShapeType::List => generate_list_serializer(shape),
ShapeType::Map => generate_map_serializer(shape),
ShapeType::Structure => generate_struct_serializer(shape, service),
_ => generate_primitive_serializer(shape),
}
}
fn generate_serializer_signature(name: &str) -> String {
format!("
#[allow(unused_variables, warnings)]
pub fn serialize<W>(mut writer: &mut EventWriter<W>, name: &str, obj: &{}) -> Result<(), xml::writer::Error> where W: Write",
name)
}
fn generate_primitive_serializer(shape: &Shape) -> String {
let value_str = match shape.shape_type {
ShapeType::Blob => "String::from_utf8(obj.to_vec()).expect(\"Not a UTF-8 string\")",
_ => "obj.to_string()",
};
format!("
writer.write(xml::writer::XmlEvent::start_element(name))?;
writer.write(xml::writer::XmlEvent::characters(&format!(\"{{value}}\", value = {value_str})))?;
writer.write(xml::writer::XmlEvent::end_element())
", value_str = value_str)
}
fn generate_list_serializer(shape: &Shape) -> String {
// flattened lists don't have enclosing <FooList> tags
// around the list elements
let flattened = match shape.flattened {
Some(true) => true,
_ => false,
};
let member = shape.member.as_ref().expect("Member shape undefined");
let element_type = &mutate_type_name(&member.shape);
let mut serializer = "".to_owned();
if flattened {
serializer += &format!("
for element in obj {{
{element_type}Serializer::serialize(writer, name, element)?;
}}", element_type = element_type);
} else {
serializer += "writer.write(xml::writer::XmlEvent::start_element(name))?;";
serializer += &format!("
for element in obj {{
{element_type}Serializer::serialize(writer, \"{location_name}\", element)?;
}}", element_type = element_type, location_name = member.location_name.as_ref().unwrap());
serializer += "writer.write(xml::writer::XmlEvent::end_element())?;";
}
serializer += "Ok(())";
serializer
}
fn generate_map_serializer(_shape: &Shape) -> String {
// No rest-xml services use Map shape type currently.
unreachable!()
}
fn generate_struct_serializer(shape: &Shape, service: &Service) -> String {
let mut serializer = "writer.write(xml::writer::XmlEvent::start_element(name))?;".to_owned();
for (member_name, member) in shape.members.as_ref().unwrap().iter() {
// look up member.shape in all_shapes. use that shape.member.location_name
let location_name = member.location_name.as_ref().unwrap_or(member_name);
if member.deprecated() {
continue;
}
let member_shape = service.shape_for_member(member).unwrap();
match member_shape.shape_type {
ShapeType::List | ShapeType::Map | ShapeType::Structure => {
serializer += &generate_complex_struct_field_serializer(shape,
member,
location_name,
member_name);
}
_ => {
serializer +=
&generate_primitive_struct_field_serializer(shape, location_name, member_name);
}
}
}
serializer += "writer.write(xml::writer::XmlEvent::end_element())";
serializer
}
fn generate_primitive_struct_field_serializer(shape: &Shape,
location_name: &str,
member_name: &str)
-> String
|
fn generate_complex_struct_field_serializer(shape: &Shape,
member: &Member,
location_name: &str,
member_name: &str)
-> String {
if shape.required(member_name) {
format!("{xml_type}Serializer::serialize(&mut writer, \"{location_name}\", &obj.{field_name})?;",
xml_type = member.shape,
location_name = location_name,
field_name = generate_field_name(member_name))
} else {
format!("
if let Some(ref value) = obj.{field_name} {{
&{xml_type}Serializer::serialize(&mut writer, \"{location_name}\", value)?;
}}",
xml_type = member.shape,
location_name = location_name,
field_name = generate_field_name(member_name))
}
}
fn used_as_request_payload(shape: &Shape) -> bool {
if shape.payload.is_some() {
return false;
}
if let Some(ref members) = shape.members {
for member in members.values() {
if member.location.is_none() {
return true;
}
}
}
false
}
fn generate_request_payload_serializer(name: &str, shape: &Shape) -> String {
let mut parts = Vec::new();
parts.push(format!("pub struct {name}Serializer;", name = name));
parts.push(format!("impl {name}Serializer {{", name = name));
parts.push("#[allow(unused_variables, warnings)]".to_owned());
parts.push(format!("pub fn serialize<W>(mut writer: &mut EventWriter<W>, name: &str, obj: &{name}, xmlns: &str) -> Result<(), xml::writer::Error> where W: Write {{", name = name));
parts.push("writer.write(xml::writer::XmlEvent::start_element(name).default_ns(xmlns))?;".to_owned());
for (member_name, member) in shape.members.as_ref().unwrap() {
if member.location.is_none() {
let location_name = member.location_name.as_ref().unwrap_or(member_name);
parts.push(generate_complex_struct_field_serializer(shape, member, location_name, member_name));
}
}
parts.push("writer.write(xml::writer::XmlEvent::end_element())".to_owned());
parts.push("}}".to_owned());
parts.join("")
}
|
{
if shape.required(member_name) {
format!(
"writer.write(xml::writer::XmlEvent::start_element(\"{location_name}\"))?;
writer.write(xml::writer::XmlEvent::characters(&format!(\"{{value}}\", value=obj.{field_name})))?;
writer.write(xml::writer::XmlEvent::end_element())?;",
field_name = generate_field_name(member_name),
location_name = location_name,
)
} else {
format!(
"if let Some(ref value) = obj.{field_name} {{
writer.write(xml::writer::XmlEvent::start_element(\"{location_name}\"))?;
writer.write(xml::writer::XmlEvent::characters(&format!(\"{{value}}\", value=value)));
writer.write(xml::writer::XmlEvent::end_element())?;
}}",
field_name = generate_field_name(member_name),
location_name = location_name,
)
}
}
|
models.py
|
# -*- coding: utf-8 -*-
from sqlalchemy import Column, Integer, String, Text, DateTime
from flaski.database import Base
from datetime import datetime
class WikiContent(Base):
|
__tablename__ = 'wikicontents'
id = Column(Integer, primary_key = True)
title = Column(String(128), unique = True)
body = Column(Text)
def __init__(self, title = None, body = None, president = None, establish = None, date = None):
self.title = title
self.body = body
def __repr__(self):
return '<Title %r>' % (self.title)
|
|
index.ts
|
import draft from './draft';
import * as projectAttachments from './project-attachments-queries';
import * as projectCreate from './project-create-queries';
import * as projectDelete from './project-delete-queries';
import * as projectTreatments from './project-treatments-queries';
import * as projectUpdate from './project-update-queries';
import * as projectView from './project-view-queries';
export default {
...projectTreatments,
...projectAttachments,
...projectCreate,
...projectDelete,
...projectUpdate,
...projectView,
draft
|
};
| |
0029_auto_20200630_0454.py
|
# Generated by Django 3.0.6 on 2020-06-30 04:54
import datetime
from django.db import migrations, models
|
dependencies = [
('app1', '0028_resources_resource_date_time'),
]
operations = [
migrations.AlterField(
model_name='resources',
name='resource_content',
field=models.CharField(max_length=100),
),
migrations.AlterField(
model_name='resources',
name='resource_date_time',
field=models.DateTimeField(default=datetime.datetime(2020, 6, 30, 4, 54, 42, 601836)),
),
]
|
class Migration(migrations.Migration):
|
0001_initial.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.9 on 2018-05-29 18:22
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import uuid
class
|
(migrations.Migration):
initial = True
dependencies = [
('contenttypes', '0002_remove_content_type_name'),
]
operations = [
migrations.CreateModel(
name='Sharing',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('uuid', models.UUIDField(default=uuid.uuid4, editable=False)),
('public', models.BooleanField(default=True)),
('sharing_id', models.PositiveIntegerField()),
('sharing_type', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='contenttypes.ContentType')),
],
),
]
|
Migration
|
siteplugins.module.ts
|
// (C) Copyright 2015 Martin Dougiamas
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import { NgModule } from '@angular/core';
import { CoreSitePluginsProvider } from './providers/siteplugins';
import { CoreSitePluginsHelperProvider } from './providers/helper';
import { CoreSitePluginsComponentsModule } from './components/components.module';
// List of providers.
export const CORE_SITEPLUGINS_PROVIDERS = [
CoreSitePluginsProvider,
CoreSitePluginsHelperProvider
];
@NgModule({
declarations: [
],
imports: [
CoreSitePluginsComponentsModule
],
providers: CORE_SITEPLUGINS_PROVIDERS
})
export class
|
{
constructor(helper: CoreSitePluginsHelperProvider) {
// Inject the helper even if it isn't used so it's instantiated.
}
}
|
CoreSitePluginsModule
|
raw_pod_controller.go
|
/*
* Copyright (C) 2021 THL A29 Limited, a Tencent company. All rights reserved.
* This source code is licensed under the Apache License Version 2.0.
*/
package controller
import (
"context"
"encoding/json"
"fmt"
"github.com/pkg/errors"
corev1 "k8s.io/api/core/v1"
_const "nocalhost/internal/nhctl/const"
"nocalhost/internal/nhctl/model"
"nocalhost/pkg/nhctl/log"
"time"
)
//const originalPodDefine = "nocalhost.dev.origin.pod.define"
// RawPodController represents a pod not managed by any controller
type RawPodController struct {
*Controller
}
func (r *RawPodController) ReplaceImage(ctx context.Context, ops *model.DevStartOptions) error {
r.Client.Context(ctx)
originalPod, err := r.Client.GetPod(r.Name)
if err != nil {
return err
}
// Check if pod managed by controller
if len(originalPod.OwnerReferences) > 0 {
return errors.New(fmt.Sprintf("Pod %s is manged by a controller, can not enter DevMode", r.Name))
}
originalPod.Status = corev1.PodStatus{}
originalPod.ResourceVersion = ""
bys, err := json.Marshal(originalPod)
if err != nil {
return errors.WithStack(err)
}
if originalPod.Annotations == nil {
originalPod.Annotations = make(map[string]string, 0)
}
originalPod.Annotations[_const.OriginWorkloadDefinition] = string(bys)
originalPod.Annotations[_const.NocalhostDevContainerAnnotations] =
r.GetDevContainerName(ops.Container)
devContainer, sideCarContainer, devModeVolumes, err :=
r.genContainersAndVolumes(&originalPod.Spec, ops.Container, ops.DevImage, ops.StorageClass, false)
if err != nil
|
patchDevContainerToPodSpec(&originalPod.Spec, ops.Container, devContainer, sideCarContainer, devModeVolumes)
log.Info("Delete original pod...")
if err = r.Client.DeletePodByName(r.Name, 0); err != nil {
return err
}
time.Sleep(1 * time.Second)
log.Info("Create dev pod...")
if _, err = r.Client.CreatePod(originalPod); err != nil {
return err
}
r.patchAfterDevContainerReplaced(ops.Container, originalPod.Kind, originalPod.Name)
r.waitDevPodToBeReady()
return nil
}
func (r *RawPodController) RollBack(reset bool) error {
originPod, err := r.Client.GetPod(r.Name)
if err != nil {
return err
}
podSpec, ok := originPod.Annotations[_const.OriginWorkloadDefinition]
if !ok {
err1 := errors.New(fmt.Sprintf("Annotation %s not found, failed to rollback", _const.OriginWorkloadDefinition))
if reset {
log.WarnE(err1, "")
return nil
}
return err1
}
originPod = &corev1.Pod{}
if err = json.Unmarshal([]byte(podSpec), originPod); err != nil {
return err
}
log.Info(" Deleting current revision...")
if err = r.Client.DeletePodByName(r.Name, 0); err != nil {
return err
}
log.Info(" Recreating original revision...")
if _, err = r.Client.CreatePod(originPod); err != nil {
return err
}
return nil
}
func findDevContainerInPodSpec(pod *corev1.PodSpec, containerName string) (*corev1.Container, error) {
var devContainer *corev1.Container
if containerName != "" {
for index, c := range pod.Containers {
if c.Name == containerName {
return &pod.Containers[index], nil
}
}
return nil, errors.New(fmt.Sprintf("Container %s not found", containerName))
} else {
if len(pod.Containers) > 1 {
return nil, errors.New(
fmt.Sprintf(
"There are more than one container defined," +
"please specify one to start developing",
),
)
}
if len(pod.Containers) == 0 {
return nil, errors.New("No container defined ???")
}
devContainer = &pod.Containers[0]
}
return devContainer, nil
}
|
{
return err
}
|
serviceEndpointPolicy.go
|
// *** WARNING: this file was generated by the Pulumi SDK Generator. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***
package v20190401
import (
"context"
"reflect"
"github.com/pkg/errors"
"github.com/pulumi/pulumi/sdk/v2/go/pulumi"
)
// Service End point policy resource.
type ServiceEndpointPolicy struct {
pulumi.CustomResourceState
// A unique read-only string that changes whenever the resource is updated.
Etag pulumi.StringPtrOutput `pulumi:"etag"`
// Resource location.
Location pulumi.StringPtrOutput `pulumi:"location"`
// Resource name.
Name pulumi.StringOutput `pulumi:"name"`
// The provisioning state of the service endpoint policy. Possible values are: 'Updating', 'Deleting', and 'Failed'.
ProvisioningState pulumi.StringOutput `pulumi:"provisioningState"`
// The resource GUID property of the service endpoint policy resource.
ResourceGuid pulumi.StringOutput `pulumi:"resourceGuid"`
// A collection of service endpoint policy definitions of the service endpoint policy.
ServiceEndpointPolicyDefinitions ServiceEndpointPolicyDefinitionResponseArrayOutput `pulumi:"serviceEndpointPolicyDefinitions"`
// A collection of references to subnets.
Subnets SubnetResponseArrayOutput `pulumi:"subnets"`
// Resource tags.
Tags pulumi.StringMapOutput `pulumi:"tags"`
// Resource type.
Type pulumi.StringOutput `pulumi:"type"`
}
// NewServiceEndpointPolicy registers a new resource with the given unique name, arguments, and options.
func NewServiceEndpointPolicy(ctx *pulumi.Context,
name string, args *ServiceEndpointPolicyArgs, opts ...pulumi.ResourceOption) (*ServiceEndpointPolicy, error)
|
// GetServiceEndpointPolicy gets an existing ServiceEndpointPolicy resource's state with the given name, ID, and optional
// state properties that are used to uniquely qualify the lookup (nil if not required).
func GetServiceEndpointPolicy(ctx *pulumi.Context,
name string, id pulumi.IDInput, state *ServiceEndpointPolicyState, opts ...pulumi.ResourceOption) (*ServiceEndpointPolicy, error) {
var resource ServiceEndpointPolicy
err := ctx.ReadResource("azure-nextgen:network/v20190401:ServiceEndpointPolicy", name, id, state, &resource, opts...)
if err != nil {
return nil, err
}
return &resource, nil
}
// Input properties used for looking up and filtering ServiceEndpointPolicy resources.
type serviceEndpointPolicyState struct {
// A unique read-only string that changes whenever the resource is updated.
Etag *string `pulumi:"etag"`
// Resource location.
Location *string `pulumi:"location"`
// Resource name.
Name *string `pulumi:"name"`
// The provisioning state of the service endpoint policy. Possible values are: 'Updating', 'Deleting', and 'Failed'.
ProvisioningState *string `pulumi:"provisioningState"`
// The resource GUID property of the service endpoint policy resource.
ResourceGuid *string `pulumi:"resourceGuid"`
// A collection of service endpoint policy definitions of the service endpoint policy.
ServiceEndpointPolicyDefinitions []ServiceEndpointPolicyDefinitionResponse `pulumi:"serviceEndpointPolicyDefinitions"`
// A collection of references to subnets.
Subnets []SubnetResponse `pulumi:"subnets"`
// Resource tags.
Tags map[string]string `pulumi:"tags"`
// Resource type.
Type *string `pulumi:"type"`
}
type ServiceEndpointPolicyState struct {
// A unique read-only string that changes whenever the resource is updated.
Etag pulumi.StringPtrInput
// Resource location.
Location pulumi.StringPtrInput
// Resource name.
Name pulumi.StringPtrInput
// The provisioning state of the service endpoint policy. Possible values are: 'Updating', 'Deleting', and 'Failed'.
ProvisioningState pulumi.StringPtrInput
// The resource GUID property of the service endpoint policy resource.
ResourceGuid pulumi.StringPtrInput
// A collection of service endpoint policy definitions of the service endpoint policy.
ServiceEndpointPolicyDefinitions ServiceEndpointPolicyDefinitionResponseArrayInput
// A collection of references to subnets.
Subnets SubnetResponseArrayInput
// Resource tags.
Tags pulumi.StringMapInput
// Resource type.
Type pulumi.StringPtrInput
}
func (ServiceEndpointPolicyState) ElementType() reflect.Type {
return reflect.TypeOf((*serviceEndpointPolicyState)(nil)).Elem()
}
type serviceEndpointPolicyArgs struct {
// A unique read-only string that changes whenever the resource is updated.
Etag *string `pulumi:"etag"`
// Resource ID.
Id *string `pulumi:"id"`
// Resource location.
Location *string `pulumi:"location"`
// The name of the resource group.
ResourceGroupName string `pulumi:"resourceGroupName"`
// A collection of service endpoint policy definitions of the service endpoint policy.
ServiceEndpointPolicyDefinitions []ServiceEndpointPolicyDefinitionType `pulumi:"serviceEndpointPolicyDefinitions"`
// The name of the service endpoint policy.
ServiceEndpointPolicyName *string `pulumi:"serviceEndpointPolicyName"`
// Resource tags.
Tags map[string]string `pulumi:"tags"`
}
// The set of arguments for constructing a ServiceEndpointPolicy resource.
type ServiceEndpointPolicyArgs struct {
// A unique read-only string that changes whenever the resource is updated.
Etag pulumi.StringPtrInput
// Resource ID.
Id pulumi.StringPtrInput
// Resource location.
Location pulumi.StringPtrInput
// The name of the resource group.
ResourceGroupName pulumi.StringInput
// A collection of service endpoint policy definitions of the service endpoint policy.
ServiceEndpointPolicyDefinitions ServiceEndpointPolicyDefinitionTypeArrayInput
// The name of the service endpoint policy.
ServiceEndpointPolicyName pulumi.StringPtrInput
// Resource tags.
Tags pulumi.StringMapInput
}
func (ServiceEndpointPolicyArgs) ElementType() reflect.Type {
return reflect.TypeOf((*serviceEndpointPolicyArgs)(nil)).Elem()
}
type ServiceEndpointPolicyInput interface {
pulumi.Input
ToServiceEndpointPolicyOutput() ServiceEndpointPolicyOutput
ToServiceEndpointPolicyOutputWithContext(ctx context.Context) ServiceEndpointPolicyOutput
}
func (*ServiceEndpointPolicy) ElementType() reflect.Type {
return reflect.TypeOf((*ServiceEndpointPolicy)(nil))
}
func (i *ServiceEndpointPolicy) ToServiceEndpointPolicyOutput() ServiceEndpointPolicyOutput {
return i.ToServiceEndpointPolicyOutputWithContext(context.Background())
}
func (i *ServiceEndpointPolicy) ToServiceEndpointPolicyOutputWithContext(ctx context.Context) ServiceEndpointPolicyOutput {
return pulumi.ToOutputWithContext(ctx, i).(ServiceEndpointPolicyOutput)
}
type ServiceEndpointPolicyOutput struct {
*pulumi.OutputState
}
func (ServiceEndpointPolicyOutput) ElementType() reflect.Type {
return reflect.TypeOf((*ServiceEndpointPolicy)(nil))
}
func (o ServiceEndpointPolicyOutput) ToServiceEndpointPolicyOutput() ServiceEndpointPolicyOutput {
return o
}
func (o ServiceEndpointPolicyOutput) ToServiceEndpointPolicyOutputWithContext(ctx context.Context) ServiceEndpointPolicyOutput {
return o
}
func init() {
pulumi.RegisterOutputType(ServiceEndpointPolicyOutput{})
}
|
{
if args == nil {
return nil, errors.New("missing one or more required arguments")
}
if args.ResourceGroupName == nil {
return nil, errors.New("invalid value for required argument 'ResourceGroupName'")
}
aliases := pulumi.Aliases([]pulumi.Alias{
{
Type: pulumi.String("azure-nextgen:network:ServiceEndpointPolicy"),
},
{
Type: pulumi.String("azure-nextgen:network/latest:ServiceEndpointPolicy"),
},
{
Type: pulumi.String("azure-nextgen:network/v20180701:ServiceEndpointPolicy"),
},
{
Type: pulumi.String("azure-nextgen:network/v20180801:ServiceEndpointPolicy"),
},
{
Type: pulumi.String("azure-nextgen:network/v20181001:ServiceEndpointPolicy"),
},
{
Type: pulumi.String("azure-nextgen:network/v20181101:ServiceEndpointPolicy"),
},
{
Type: pulumi.String("azure-nextgen:network/v20181201:ServiceEndpointPolicy"),
},
{
Type: pulumi.String("azure-nextgen:network/v20190201:ServiceEndpointPolicy"),
},
{
Type: pulumi.String("azure-nextgen:network/v20190601:ServiceEndpointPolicy"),
},
{
Type: pulumi.String("azure-nextgen:network/v20190701:ServiceEndpointPolicy"),
},
{
Type: pulumi.String("azure-nextgen:network/v20190801:ServiceEndpointPolicy"),
},
{
Type: pulumi.String("azure-nextgen:network/v20190901:ServiceEndpointPolicy"),
},
{
Type: pulumi.String("azure-nextgen:network/v20191101:ServiceEndpointPolicy"),
},
{
Type: pulumi.String("azure-nextgen:network/v20191201:ServiceEndpointPolicy"),
},
{
Type: pulumi.String("azure-nextgen:network/v20200301:ServiceEndpointPolicy"),
},
{
Type: pulumi.String("azure-nextgen:network/v20200401:ServiceEndpointPolicy"),
},
{
Type: pulumi.String("azure-nextgen:network/v20200501:ServiceEndpointPolicy"),
},
{
Type: pulumi.String("azure-nextgen:network/v20200601:ServiceEndpointPolicy"),
},
{
Type: pulumi.String("azure-nextgen:network/v20200701:ServiceEndpointPolicy"),
},
{
Type: pulumi.String("azure-nextgen:network/v20200801:ServiceEndpointPolicy"),
},
})
opts = append(opts, aliases)
var resource ServiceEndpointPolicy
err := ctx.RegisterResource("azure-nextgen:network/v20190401:ServiceEndpointPolicy", name, args, &resource, opts...)
if err != nil {
return nil, err
}
return &resource, nil
}
|
Fighters.py
|
import random
import time
class Athlete():
name = ""
health = 100
def __init__(self, newName):
self.name = newName
print("На ринге появляется новый боец, его имя - ", self.name )
print()
def punch(self, other):
time.sleep(1)
print(self.name,
|
.health != 0) and (fighter2.health != 0):
fighters = [fighter1, fighter2]
if fighters[random.randint(0,1)] == fighter1:
fighter1.punch(fighter2)
else:
fighter2.punch(fighter1)
print("Победу в поединке одержал " + (fighter1.name if fighter1.health > 0 else fighter2.name) + "!")
|
"наносит удар бойцу ", other.name)
other.health -= 20
print("Уровень физического состояния бойца ", other.name, " - ", other.health)
print()
fighter1 = Athlete("Владимир")
fighter2 = Athlete("Николай")
while (fighter1
|
Neptune_Optuna_integration_customize_callback.py
|
import lightgbm as lgb
import neptune.new as neptune
import neptune.new.integrations.optuna as optuna_utils
import optuna
from sklearn.datasets import load_breast_cancer
from sklearn.metrics import roc_auc_score
from sklearn.model_selection import train_test_split
def
|
(trial):
data, target = load_breast_cancer(return_X_y=True)
train_x, test_x, train_y, test_y = train_test_split(data, target, test_size=0.25)
dtrain = lgb.Dataset(train_x, label=train_y)
param = {
"verbose": -1,
"objective": "binary",
"metric": "binary_logloss",
"num_leaves": trial.suggest_int("num_leaves", 2, 256),
"feature_fraction": trial.suggest_uniform("feature_fraction", 0.2, 1.0),
"bagging_fraction": trial.suggest_uniform("bagging_fraction", 0.2, 1.0),
"min_child_samples": trial.suggest_int("min_child_samples", 3, 100),
}
gbm = lgb.train(param, dtrain)
preds = gbm.predict(test_x)
accuracy = roc_auc_score(test_y, preds)
return accuracy
# Create a Neptune Run
run = neptune.init(
api_token="ANONYMOUS", project="common/optuna-integration"
) # you can pass your credentials here
# Create a NeptuneCallback for Optuna
neptune_callback = optuna_utils.NeptuneCallback(
run,
plots_update_freq=10,
log_plot_slice=False,
log_plot_contour=False,
)
# Pass NeptuneCallback to Optuna Study .optimize()
study = optuna.create_study(direction="maximize")
study.optimize(objective, n_trials=50, callbacks=[neptune_callback])
# Stop logging to a Neptune Run
run.stop()
|
objective
|
index.js
|
/* global Shopify */
import React from "react";
import ReactDOM from "react-dom";
import ReactPixel from "react-facebook-pixel";
import ReactSnapchat from "react-snapchat-pixel";
import ReactGA from "react-ga";
import ReactPinterest from "../lib/Pinterest";
import demoCampaign from "../static/campaign.json";
import WidgetContainer from "./WidgetContainer";
import "./style.scss";
import MyEventListenerApp from "../lib/MyEventListenerApp";
import initRechargeIntegration from "../lib/integrations/initRechargeIntegration";
import isEqual from 'lodash.isequal';
import ErrorBoundary from "./ErrorBoundary";
const url = "{{__URL__}}";
const shop = "{{__SHOP__}}";
const elementId = "cb-upsell";
//
let searchOrderId = "";
let searchCampaignId = null;
let shopSettings = {};
let isCartPage = false;
let cacheCart = null;
let cacheCampaign = null;
let cacheSettings = null;
let currency = getCookie("cart_currency");
let shopifyExchangeRate;
if (
!currency &&
typeof Shopify !== "undefined" &&
Shopify &&
Shopify.currency &&
Shopify.currency.active &&
Shopify.currency.rate
) {
currency = Shopify.currency.active;
shopifyExchangeRate = parseFloat(Shopify.currency.rate);
}
const appEvent = new MyEventListenerApp("conversionBearUpsell");
// TrackJS.install({ token: 'e4d05c79e48f4002a094e444fa3700e8', application:'upsell-widget' ,window: { enabled: false , promise: false} });
let genWidget = () => null;
function getCookie(name) {
const value = '; ' + document.cookie;
const parts = value.split('; ' + name + '=');
if (parts.length == 2) {
return parts.pop().split(';').shift();
}
return '';
}
const defaultSettings = {
isDemo: false,
onLoaded: () => null,
getDemoCheckoutData: () => null,
show: () => null,
...(window.conversionBearUpsellSettings
? window.conversionBearUpsellSettings
: {})
};
const conversionBearUpsell = {
closeApp: () => {
document.getElementById(elementId).remove();
},
onLoaded: defaultSettings.onLoaded,
getDemoCheckoutData: defaultSettings.getDemoCheckoutData
};
(async () => {
genWidget = ({ campaign_id = null, dontTriggerOnLoaded = false, isCart = false }) => {
Promise.all([fetchSettings(), getCampaign(campaign_id, isCart)])
.then(([settingsData, campaign]) => {
if (dontTriggerOnLoaded) {
conversionBearUpsell.onLoaded = () => {};
}
if (!settingsData || !campaign) {
conversionBearUpsell.onLoaded();
if (isCart && !campaign) {
window.location.href = '/checkout';
}
return;
}
if (defaultSettings.isDemo) {
isCart = campaign.page_to_show == 'cart_page';
}
shopSettings = settingsData;
if (
settingsData.advanced &&
settingsData.advanced.facebook_pixel_id &&
settingsData.advanced.facebook_pixel_id !== "" &&
!window.fbq
) {
ReactPixel.init(
settingsData.advanced.facebook_pixel_id,
{},
{ debug: false, autoConfig: false },
);
if (window.fbq) {
fbq('track', 'PageView');
} else {
ReactPixel.pageView();
}
}
if (
settingsData.advanced &&
settingsData.advanced.pinterest_tag_id &&
settingsData.advanced.pinterest_tag_id !== ""
) {
ReactPinterest.init(settingsData.advanced.pinterest_tag_id);
}
if (
settingsData.advanced &&
settingsData.advanced.snapchat_pixel_id &&
settingsData.advanced.snapchat_pixel_id !== ""
) {
ReactSnapchat.init(
settingsData.advanced.snapchat_pixel_id,
{},
{ debug: false }
);
}
if (
settingsData.advanced &&
settingsData.advanced.google_analytics_id &&
settingsData.advanced.google_analytics_id !== ""
) {
ReactGA.initialize(settingsData.advanced.google_analytics_id, {
debug: false
});
}
//settings override - USAGE EXAMPLE
// <script>
// window.cbUpsellCustomSettingsOverride = {
// countdown_timer:{ show: false }
// };
// </script>
if (
window.cbUpsellCustomDesignSettingsOverride &&
typeof window.cbUpsellCustomDesignSettingsOverride === "object" &&
settingsData &&
settingsData.design &&
settingsData.design.theme
) {
for (let key of Object.keys(window.cbUpsellCustomDesignSettingsOverride)) {
if (settingsData.design.theme.hasOwnProperty(key)) {
if (
typeof window.cbUpsellCustomDesignSettingsOverride[key] === "object"
) {
for (let keyTag of Object.keys(
window.cbUpsellCustomDesignSettingsOverride[key]
)) {
settingsData.design.theme[key][keyTag] =
window.cbUpsellCustomDesignSettingsOverride[key][keyTag];
}
} else {
settingsData.design.theme[key] = window.cbUpsellCustomDesignSettingsOverride[key];
}
}
}
}
const div = document.createElement("div");
div.setAttribute("id", elementId);
document.body.appendChild(div);
// add bottom page padding for the mobile widget so the bottom part is reachable
if (window.innerWidth <= 767) {
try {
const widgetHeight = '170px';
if (document.getElementsByClassName('main')[0] && document.getElementsByClassName('main').length === 1) {
document.getElementsByClassName('main')[0].style.marginBottom = widgetHeight;
} else if (document.getElementsByTagName("body")[0].style.marginBottom === "") {
document.getElementsByTagName(
"body"
)[0].style.marginBottom = widgetHeight;
}
} catch (error) {
console.log(`cb: couldn't increase window height`);
}
}
if (isCart) {
createOrderView(campaign._id);
}
ReactDOM.render(
<ErrorBoundary
handleError={handleError}
>
<WidgetContainer
campaign={campaign}
settings={settingsData}
isDemo={defaultSettings.isDemo}
forseIsMobile={defaultSettings.forseIsMobile}
goToCheckout={goToCheckout}
conversionBearUpsell={conversionBearUpsell}
url={url}
ReactPixel={ReactPixel}
ReactSnapchat={ReactSnapchat}
ReactGA={ReactGA}
ReactPinterest={ReactPinterest}
appEvent={appEvent}
isCartPage={isCart}
currency={currency}
/>
</ErrorBoundary>,
document.getElementById(elementId)
);
})
.catch(err => {
console.log(err);
});
};
function fetchSettings() {
if (defaultSettings.isDemo && localStorage.getItem("upsellShopSettings")) {
try {
return JSON.parse(localStorage.getItem("upsellShopSettings"));
} catch (error) {
//
}
}
if(cacheSettings){
return cacheSettings;
}
return fetch(`${url}settings?shop=${shop}`).then(resp => {
if(resp.status != 200){
return null;
}
return resp.json();
});
}
function getCart() {
return fetch('/cart.js').then(res => res.json()).then(data => {
return data;
}).catch((err) => {
return null;
})
}
async function getCampaign(campaign_id = null, isCart = false, forseUpdate = false) {
try {
if (defaultSettings.isDemo) {
if (localStorage.getItem("upsellCampaing")) {
let res_demo = await fetch(`${url}default/demo_campaign?shop=${shop}`, {
headers: {
Accept: "application/json",
"Content-Type": "application/json"
},
method: "post",
body: JSON.stringify(
JSON.parse(localStorage.getItem("upsellCampaing"))
)
});
return await res_demo.json();
} else {
return Promise.resolve(demoCampaign);
}
} else if (searchOrderId || campaign_id || isCart) {
if (cacheCampaign && !forseUpdate && !campaign_id) {
return cacheCampaign;
}
let urlRequest = `${url}default/campaign?&shop=${shop}&orderId=${searchOrderId}&isMobile=${window.innerWidth <=
767}¤cy=${currency}`;
if (campaign_id) {
urlRequest = `${url}default/campaign?campaignId=${campaign_id}&shop=${shop}&isMobile=${window.innerWidth <=
767}¤cy=${currency}`;
}
const product_ids = [];
let total_price = 0;
if (isCart) {
const cart = await getCart();
if (cart.item_count < 1) {
return null;
}
cart.items.forEach((item) => {
product_ids.push(item.product_id);
});
total_price = cart.total_price ? cart.total_price / 100 : 0;
urlRequest = `${url}default/campaign_cart?shop=${shop}&isMobile=${window.innerWidth <=
767}¤cy=${currency}`;
if (localStorage.getItem('tmpOrderId')) {
urlRequest += '&tmpOrderId=' + localStorage.getItem('tmpOrderId')
}
}
if (localStorage.getItem('excludeCampaigns')) {
try {
const excludeCampaigns = JSON.parse(localStorage.getItem('excludeCampaigns'));
if (Array.isArray(excludeCampaigns)) {
urlRequest += `&excludeIds=${excludeCampaigns.join(',')}`;
}
} catch (error) {
console.log('getCampaign -> error', error);
}
}
let result = await fetch(urlRequest, {
headers: {
"Content-Type": "application/json"
},
method: isCart ? 'POST' : 'GET',
body: isCart ? JSON.stringify({
product_ids,
total_price
}) : undefined,
})
if (result.status != 200) {
return null;
}
result = await result.json();
if (result.hasOwnProperty('tmpOrderId')) {
searchOrderId = result.tmpOrderId;
localStorage.setItem('tmpOrderId', result.tmpOrderId);
}
if (result.hasOwnProperty('campaign_id_exclude') && result.campaign_id_exclude) {
try {
if (localStorage.getItem('excludeCampaigns')) {
const excludeCampaigns = JSON.parse(localStorage.getItem('excludeCampaigns'));
localStorage.setItem('excludeCampaigns', JSON.stringify([...excludeCampaigns, result.campaign_id_exclude]));
} else {
localStorage.setItem('excludeCampaigns', JSON.stringify([result.campaign_id_exclude]));
}
} catch (error) {
console.log('getCampaign -> error', error);
}
}
return result._id ? result : null;
}
return null;
} catch (error) {
return null;
}
}
function createOrderView(campaign_id) {
fetch(
`${url}default/campaign_order_view?currency=${currency}`,
{
method: 'POST',
headers: {
"Content-Type": "application/json"
},
body: JSON.stringify({
shopOrigin: shop,
campaign_id,
tmpOrderId: localStorage.getItem('tmpOrderId'),
}),
},
)
.then((resp) => {
if (resp.status != 200) {
throw new Error('Not create order view');
}
return resp.json();
})
.then((result) => {
if (result.tmpOrderId) {
searchOrderId = result.tmpOrderId;
localStorage.setItem('tmpOrderId', result.tmpOrderId);
}
}).catch((err) => {
console.log('createOrderView -> err', err);
});
}
function createOrder(searchOrderId, body) {
return fetch(
`${url}default/order?shop=${shop}&orderId=${searchOrderId}¤cy=${currency}`,
{
method: "POST",
headers: {
"Content-Type": "application/json"
},
body: JSON.stringify(body)
}
)
.then(resp => {
if(resp.status != 200){
throw new Error('Not create order');
}
return resp.json()
})
.then((result) => {
if (result.invoice_url) {
setTimeout(() => {
conversionBearUpsell.closeApp();
window.location.href = result.invoice_url;
}, 2000);
} else {
window.location.href = '/checkout';
}
}).catch((err) => {
window.location.href = '/checkout';
});
}
async function goToCheckout(data) {
try {
if (defaultSettings.isDemo) {
conversionBearUpsell.getDemoCheckoutData(data);
conversionBearUpsell.closeApp();
return null;
} else if (window.conversionBearUpsell.RECHARGE_PRODUCTS && data.offers) { //RECHARGE INTEGRATION
initRechargeIntegration(data.offers);
} else if (isCartPage) {
if (!data || !data.offers || !data.offers.length) {
window.location.href = '/checkout';
conversionBearUpsell.closeApp();
return null;
}
data.cart = await getCart();
return await createOrder(searchOrderId, data);
} else if (searchOrderId) {
return await createOrder(searchOrderId, data);
}
return null;
} catch (error) {
return null;
}
}
function handleError(){
console.log("handleError -> handleError")
if (
(window.location.pathname.indexOf("thank_you") > -1 ||
window.location.pathname.indexOf("orders") > -1)){
return null;
} else if(isCartPage) {
goToCheckout();
|
} else {
return null;
}
}
function setCacheSettings() {
return fetchSettings().then((settings) => {
cacheSettings = settings;
});
}
async function setCacheCampaign() {
try {
const cart = await getCart();
if (!isEqual(cart, cacheCart)) {
cacheCart = cart;
const campaign = await getCampaign(null, true, true);
cacheCampaign = campaign;
}
} catch (error) {
//
}
}
function setCheckoutClickListener() {
// eslint-disable-next-line shopify/prefer-early-return
document.addEventListener('submit', (e) => {
if (
typeof e.target.action === "string" &&
(e.target.action.slice(-5) === "/cart" ||
e.target.action.slice(-9) === "/checkout")
) {
e.preventDefault();
isCartPage = true;
genWidget({ campaign_id: searchCampaignId, isCart: isCartPage });
}
});
// eslint-disable-next-line shopify/prefer-early-return
document.addEventListener('click', (e) => {
if (e.target && e.target.className && (typeof (e.target.className) === 'string') && e.target.className.indexOf('cb-upsell-show-cart-page-widget') > -1) {
e.preventDefault();
isCartPage = true;
genWidget({ campaign_id: searchCampaignId, isCart: isCartPage });
}
});
//init custom event handlers to trigger the widget pre-checkout
if (window && window.Shopify && window.Shopify.theme) {
const themeId = window.Shopify.theme.id;
let submitButtonSelector;
switch (themeId) {
case 89030557831: //KIVO Code
document
.querySelector("#add-to-cart")
.addEventListener("click", (e) => {
setTimeout(() => {
document
.querySelectorAll(".checkout-link.button")
.forEach((element) => {
element.addEventListener("click", (e) => {
e.preventDefault();
isCartPage = true;
genWidget({
campaign_id: searchCampaignId,
isCart: isCartPage,
});
});
});
}, 1000);
});
break;
case 88979406981: //KIBO Code V2
document
.querySelector("#add-to-cart")
.addEventListener("click", (e) => {
setTimeout(() => {
document
.querySelectorAll(".checkout-link.button")
.forEach((element) => {
element.addEventListener("click", (e) => {
e.preventDefault();
isCartPage = true;
genWidget({
campaign_id: searchCampaignId,
isCart: isCartPage,
});
});
element.setAttribute('onclick','');
if(window.screen.width > 500){
let style = document.createElement('style');
style.innerHTML =
'.cb-widget-component {' +
'left: 50% !important;' +
'}';
// Get the first script tag
var ref = document.querySelector('script');
// Insert our new styles before the first script tag
ref.parentNode.insertBefore(style, ref);
}
});
}, 1000);
});
break;
default:
break;
}
if ( //TODO: extract to a function
window.theme &&
window.theme.name &&
window.theme.name.toLowerCase.includes("debutify")
) {
document
.querySelector("#AddToCart--product-template")
.addEventListener("click", (e) => {
setTimeout(() => {
document
.querySelectorAll(".ajaxcart-checkout.cart__checkout")
.forEach((element) => {
element.addEventListener("click", (e) => {
e.preventDefault();
isCartPage = true;
genWidget({
campaign_id: searchCampaignId,
isCart: isCartPage,
});
});
});
}, 500);
});
document
.querySelectorAll(".ajaxcart-checkout.cart__checkout")
.forEach((element) => {
element.addEventListener("click", (e) => {
e.preventDefault();
isCartPage = true;
genWidget({
campaign_id: searchCampaignId,
isCart: isCartPage,
});
});
});
}
}
}
if (!defaultSettings.isDemo) {
if (
(window.location.pathname.indexOf("thank_you") > -1 ||
window.location.pathname.indexOf("orders") > -1) &&
Shopify.checkout &&
Shopify.checkout.order_id
) {
searchOrderId = Shopify.checkout.order_id;
console.log('%c 🍯 Honeycomb by Conversion Bear: loaded post purchase', 'background: #FBCE10; color: white');
} else {
const parsedUrl = new URL(window.location);
if (parsedUrl.searchParams.has("cb_campaign")) {
searchCampaignId = parsedUrl.searchParams.get("cb_campaign");
} else if(window && window.customHoneycombCampaignId){
searchCampaignId = window.customHoneycombCampaignId;
}
else {
await setCacheSettings();
if (!cacheSettings.has_active_pre_purchase_funnels) {
return;
}
setCheckoutClickListener();
setCacheCampaign();
if (window.location.pathname.indexOf("cart") > -1) {
// on cart page
setInterval(() => {
setCacheCampaign();
}, 3000);
}
console.log('%c 🍯 Honeycomb by Conversion Bear: loaded pre purchase', 'background: #FBCE10; color: white');
setTimeout(() => {
conversionBearUpsell.onLoaded();
}, 50);
return;
}
}
}
genWidget({ campaign_id: searchCampaignId });
})();
export function closeApp() {
conversionBearUpsell.closeApp();
}
export function show(campaign_id = "first") {
genWidget({ campaign_id: searchCampaignId, dontTriggerOnLoaded: true });
}
export function showCartPageWidget() {
isCartPage = true;
genWidget({ campaign_id: searchCampaignId, isCart: isCartPage });
}
| |
workflow_test.go
|
// Copyright 2017 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package daisy
import (
"bytes"
"context"
"errors"
"fmt"
"io"
"io/ioutil"
"net/http"
"net/http/httptest"
"os"
"path/filepath"
"regexp"
"strings"
"sync"
"testing"
"time"
"cloud.google.com/go/storage"
"github.com/stretchr/testify/assert"
computeAlpha "google.golang.org/api/compute/v0.alpha"
computeBeta "google.golang.org/api/compute/v0.beta"
"google.golang.org/api/compute/v1"
"google.golang.org/api/option"
)
func TestAddDependency(t *testing.T) {
w := &Workflow{}
a, _ := w.NewStep("a")
b, _ := w.NewStep("b")
otherW := &Workflow{}
c, _ := otherW.NewStep("c")
tests := []struct {
desc string
in1, in2 *Step
shouldErr bool
}{
{"good case", a, b, false},
{"idempotent good case", a, b, false},
{"bad case 1", a, c, true},
{"bad case 2", c, b, true},
}
for _, tt := range tests {
if err := w.AddDependency(tt.in1, tt.in2); err == nil && tt.shouldErr {
t.Errorf("%s: should have erred but didn't", tt.desc)
} else if err != nil && !tt.shouldErr {
t.Errorf("%s: unexpected error: %v", tt.desc, err)
}
}
wantDeps := map[string][]string{"a": {"b"}}
if diffRes := diff(w.Dependencies, wantDeps, 0); diffRes != "" {
t.Errorf("incorrect dependencies: (-got,+want)\n%s", diffRes)
}
}
func TestDaisyBkt(t *testing.T) {
client, err := newTestGCSClient()
if err != nil {
t.Fatal(err)
}
project := "foo-project"
got, err := daisyBkt(context.Background(), client, project)
if err != nil {
t.Fatal(err)
}
want := project + "-daisy-bkt"
if got != project+"-daisy-bkt" {
t.Errorf("bucket does not match, got: %q, want: %q", got, want)
}
project = "bar-project"
got, err = daisyBkt(context.Background(), client, project)
if err != nil {
t.Fatal(err)
}
want = project + "-daisy-bkt"
if got != project+"-daisy-bkt" {
t.Errorf("bucket does not match, got: %q, want: %q", got, want)
}
}
func TestCleanup(t *testing.T) {
cleanedup1 := false
cleanedup2 := false
cleanup1 := func() DError {
cleanedup1 = true
return nil
}
cleanup2 := func() DError {
cleanedup2 = true
return nil
}
cleanupFail := func() DError {
return Errf("failed cleanup")
}
w := testWorkflow()
w.addCleanupHook(cleanup1)
w.addCleanupHook(cleanupFail)
w.addCleanupHook(cleanup2)
w.cleanup()
if !cleanedup1 {
t.Error("cleanup1 was not run")
}
if !cleanedup2 {
t.Error("cleanup2 was not run")
}
}
func TestGenName(t *testing.T)
|
func TestGetSourceGCSAPIPath(t *testing.T) {
w := testWorkflow()
w.sourcesPath = "my/sources"
got := w.getSourceGCSAPIPath("foo")
want := "https://storage.cloud.google.com/my/sources/foo"
if got != want {
t.Errorf("unexpected result: got: %q, want %q", got, want)
}
}
func TestCancelWorkflow_IsIdempotent(t *testing.T) {
w := testWorkflow()
if w.isCanceled {
t.Error("Didn't expect workflow to be canceled.")
}
w.CancelWorkflow()
w.CancelWorkflow()
if !w.isCanceled {
t.Error("Expect workflow to be canceled.")
}
}
func TestCancelWithReason_IsCallableMultipleTimes_AndKeepsFirstCancelReason(t *testing.T) {
w := testWorkflow()
reason1 := "reason1"
reason2 := "reason2"
w.CancelWithReason(reason1)
w.CancelWithReason(reason2)
if !w.isCanceled {
t.Error("Expect workflow to be canceled.")
}
if w.getCancelReason() != reason1 {
t.Errorf("Expected reason1 mismatch. got=%q, want=%q", w.getCancelReason(), reason1)
}
}
func TestCancelWorkflow_RecoversFromManuallyClosedChannel(t *testing.T) {
w := testWorkflow()
if w.isCanceled {
t.Error("Didn't expect workflow to be canceled.")
}
close(w.Cancel)
w.CancelWorkflow()
if !w.isCanceled {
t.Error("Expect workflow to be canceled.")
}
}
func TestNewFromFileError(t *testing.T) {
td, err := ioutil.TempDir(os.TempDir(), "")
if err != nil {
t.Fatalf("error creating temp dir: %v", err)
}
defer os.RemoveAll(td)
tf := filepath.Join(td, "test.wf.json")
tests := []struct{ data, error string }{
{
`{"test":["1", "2",]}`,
tf + ": JSON syntax error in line 1: invalid character ']' looking for beginning of value \n{\"test\":[\"1\", \"2\",]}\n ^",
},
{
`{"test":{"key1":"value1" "key2":"value2"}}`,
tf + ": JSON syntax error in line 1: invalid character '\"' after object key:value pair \n{\"test\":{\"key1\":\"value1\" \"key2\":\"value2\"}}\n ^",
},
{
`{"test": value}`,
tf + ": JSON syntax error in line 1: invalid character 'v' looking for beginning of value \n{\"test\": value}\n ^",
},
{
`{"test": "value"`,
tf + ": JSON syntax error in line 1: unexpected end of JSON input \n{\"test\": \"value\"\n ^",
},
{
"{\n\"test\":[\"1\", \"2\",],\n\"test2\":[\"1\", \"2\"]\n}",
tf + ": JSON syntax error in line 2: invalid character ']' looking for beginning of value \n\"test\":[\"1\", \"2\",],\n ^",
},
}
for i, tt := range tests {
if err := ioutil.WriteFile(tf, []byte(tt.data), 0600); err != nil {
t.Fatalf("error creating json file: %v", err)
}
if _, err := NewFromFile(tf); err == nil {
t.Errorf("expected error, got nil for test %d", i+1)
} else if err.Error() != tt.error {
t.Errorf("did not get expected error from NewFromFile():\ngot: %q\nwant: %q", err.Error(), tt.error)
}
}
}
func TestNewIncludedWorkflowFromFile_UsesResourcesFromParent(t *testing.T) {
parent := New()
parent.workflowDir = "./test_data"
included, err := parent.NewIncludedWorkflowFromFile("TestNewIncludedWorkflowFromFile_UsesResourcesFromParent.wf.json")
if err != nil {
t.Fatal(err)
}
assertEqual(t, parent.Cancel, included.Cancel, "Cancel")
assertEqual(t, parent, included.parent, "parent")
assertEqual(t, parent.disks, included.disks, "disks")
assertEqual(t, parent.forwardingRules, included.forwardingRules, "forwardingRules")
assertEqual(t, parent.images, included.images, "images")
assertEqual(t, parent.machineImages, included.machineImages, "machineImages")
assertEqual(t, parent.instances, included.instances, "instances")
assertEqual(t, parent.networks, included.networks, "networks")
assertEqual(t, parent.subnetworks, included.subnetworks, "subnetworks")
assertEqual(t, parent.targetInstances, included.targetInstances, "targetInstances")
assertEqual(t, parent.snapshots, included.snapshots, "snapshots")
assertEqual(t, parent.objects, included.objects, "objects")
}
func assertEqual(t *testing.T, expected, actual interface{}, msg string) {
t.Helper()
if expected != actual {
t.Error(msg)
}
}
func TestNewFromFile_ReadsChildWorkflows(t *testing.T) {
parent, derr := NewFromFile("./test_data/TestNewFromFile_ReadsChildWorkflows.parent.wf.json")
if derr != nil {
t.Fatal(derr)
}
// Included Workflow
includedStep1 := parent.Steps["include-workflow"].IncludeWorkflow
assert.NotNil(t, includedStep1, "NewFromFile should read and parse included workflow")
assert.Equal(t, map[string]string{
"k1": "v1",
}, includedStep1.Vars, "included workflow should have variables that were declared in its step in the parent.")
includedStep2 := includedStep1.Workflow.Steps["include-workflow"].IncludeWorkflow
assert.NotNil(t, includedStep2, "NewFromFile should read and parse included workflows recursively")
assert.Equal(t, map[string]string{
"k3": "v3",
}, includedStep2.Vars, "included workflow should have variables that were declared in its step in the parent.")
// Sub Workflow
subStep1 := parent.Steps["sub-workflow"].SubWorkflow
assert.NotNil(t, subStep1, "NewFromFile should read and parse included workflow")
assert.Equal(t, map[string]string{
"k2": "v2",
}, subStep1.Vars, "sub workflow should have variables that were declared in its step in the parent.")
subStep2 := subStep1.Workflow.Steps["sub-workflow"].SubWorkflow
assert.NotNil(t, subStep2, "NewFromFile should read and parse sub workflows recursively")
assert.Equal(t, map[string]string{
"k4": "v4",
}, subStep2.Vars, "sub workflow should have variables that were declared in its step in the parent.")
}
func TestNewFromFile(t *testing.T) {
got, derr := NewFromFile("./test_data/test.wf.json")
if derr != nil {
t.Fatal(derr)
}
wd, err := os.Getwd()
if err != nil {
t.Fatal(err)
}
want := New()
// These are difficult to validate and irrelevant, so we cheat.
want.id = got.ID()
want.Cancel = got.Cancel
want.cleanupHooks = got.cleanupHooks
want.disks = newDiskRegistry(want)
want.images = newImageRegistry(want)
want.machineImages = newMachineImageRegistry(want)
want.instances = newInstanceRegistry(want)
want.networks = newNetworkRegistry(want)
want.workflowDir = filepath.Join(wd, "test_data")
want.Name = "some-name"
want.Project = "some-project"
want.Zone = "us-central1-a"
want.GCSPath = "gs://some-bucket/images"
want.OAuthPath = filepath.Join(wd, "test_data", "somefile")
want.Sources = map[string]string{}
want.autovars = map[string]string{}
want.Vars = map[string]Var{
"bootstrap_instance_name": {Value: "bootstrap-${NAME}", Required: true},
"machine_type": {Value: "n1-standard-1"},
"key1": {Value: "var1"},
"key2": {Value: "var2"},
}
want.Steps = map[string]*Step{
"create-disks": {
name: "create-disks",
CreateDisks: &CreateDisks{
{
Disk: compute.Disk{
Name: "bootstrap",
SourceImage: "projects/windows-cloud/global/images/family/windows-server-2016-core",
Type: "pd-ssd",
},
SizeGb: "50",
},
{
Disk: compute.Disk{
Name: "image",
SourceImage: "projects/windows-cloud/global/images/family/windows-server-2016-core",
Type: "pd-standard",
},
SizeGb: "50",
},
},
},
"${bootstrap_instance_name}": {
name: "${bootstrap_instance_name}",
CreateInstances: &CreateInstances{
Instances: []*Instance{
{
Instance: compute.Instance{
Name: "${bootstrap_instance_name}",
Disks: []*compute.AttachedDisk{{Source: "bootstrap"}, {Source: "image"}},
MachineType: "${machine_type}",
},
InstanceBase: InstanceBase{
StartupScript: "shutdown /h",
Scopes: []string{"scope1", "scope2"},
},
Metadata: map[string]string{"test_metadata": "this was a test"},
},
},
InstancesBeta: []*InstanceBeta{
{
Instance: computeBeta.Instance{
Name: "${bootstrap_instance_name}",
Disks: []*computeBeta.AttachedDisk{{Source: "bootstrap"}, {Source: "image"}},
MachineType: "${machine_type}",
},
InstanceBase: InstanceBase{
StartupScript: "shutdown /h",
Scopes: []string{"scope1", "scope2"},
},
Metadata: map[string]string{"test_metadata": "this was a test"},
},
},
},
},
"${bootstrap_instance_name}-stopped": {
name: "${bootstrap_instance_name}-stopped",
Timeout: "1h",
WaitForInstancesSignal: &WaitForInstancesSignal{{Name: "${bootstrap_instance_name}", Stopped: true, Interval: "1s"}},
},
"postinstall": {
name: "postinstall",
CreateInstances: &CreateInstances{
Instances: []*Instance{
{
Instance: compute.Instance{
Name: "postinstall",
Disks: []*compute.AttachedDisk{{Source: "image"}, {Source: "bootstrap"}},
MachineType: "${machine_type}",
},
InstanceBase: InstanceBase{
StartupScript: "shutdown /h",
Scopes: []string{"scope3", "scope4"},
},
},
{
Instance: compute.Instance{
Name: "postinstallBeta",
MachineType: "${machine_type}",
},
},
},
InstancesBeta: []*InstanceBeta{
{
Instance: computeBeta.Instance{
Name: "postinstall",
Disks: []*computeBeta.AttachedDisk{{Source: "image"}, {Source: "bootstrap"}},
MachineType: "${machine_type}",
},
InstanceBase: InstanceBase{
StartupScript: "shutdown /h",
Scopes: []string{"scope3", "scope4"},
},
},
{
Instance: computeBeta.Instance{
Name: "postinstallBeta",
MachineType: "${machine_type}",
SourceMachineImage: "source-machine-image",
},
},
},
},
},
"postinstall-stopped": {
name: "postinstall-stopped",
WaitForInstancesSignal: &WaitForInstancesSignal{{Name: "postinstall", Stopped: true}},
},
"create-image-locality": {
name: "create-image-locality",
CreateImages: &CreateImages{
Images: []*Image{{
Image: compute.Image{Name: "image-from-local-disk", SourceDisk: "local-image", StorageLocations: []string{"europe-west1"}, Description: "Some Ubuntu", Family: "ubuntu-1404"},
ImageBase: ImageBase{OverWrite: false,
Resource: Resource{Project: "a_project", NoCleanup: true, ExactName: false},
},
GuestOsFeatures: []string{"VIRTIO_SCSI_MULTIQUEUE", "UBUNTU", "MULTI_IP_SUBNET"},
}},
ImagesAlpha: []*ImageAlpha{{
Image: computeAlpha.Image{Name: "image-from-local-disk", SourceDisk: "local-image", StorageLocations: []string{"europe-west1"}, Description: "Some Ubuntu", Family: "ubuntu-1404"},
ImageBase: ImageBase{OverWrite: false,
Resource: Resource{Project: "a_project", NoCleanup: true, ExactName: false},
},
GuestOsFeatures: []string{"VIRTIO_SCSI_MULTIQUEUE", "UBUNTU", "MULTI_IP_SUBNET"},
}},
ImagesBeta: []*ImageBeta{{
Image: computeBeta.Image{Name: "image-from-local-disk", SourceDisk: "local-image", StorageLocations: []string{"europe-west1"}, Description: "Some Ubuntu", Family: "ubuntu-1404"},
ImageBase: ImageBase{OverWrite: false,
Resource: Resource{Project: "a_project", NoCleanup: true, ExactName: false},
},
GuestOsFeatures: []string{"VIRTIO_SCSI_MULTIQUEUE", "UBUNTU", "MULTI_IP_SUBNET"},
}},
},
},
"create-image": {
name: "create-image",
CreateImages: &CreateImages{
Images: []*Image{{
Image: compute.Image{Name: "image-from-disk", SourceDisk: "image", Description: "Microsoft, SQL Server 2016 Web, on Windows Server 2019", Family: "sql-web-2016-win-2019"},
ImageBase: ImageBase{OverWrite: true,
Resource: Resource{Project: "a_project", NoCleanup: true, ExactName: true},
},
GuestOsFeatures: []string{"VIRTIO_SCSI_MULTIQUEUE", "WINDOWS", "MULTI_IP_SUBNET"},
}},
ImagesAlpha: []*ImageAlpha{{
Image: computeAlpha.Image{Name: "image-from-disk", SourceDisk: "image", Description: "Microsoft, SQL Server 2016 Web, on Windows Server 2019", Family: "sql-web-2016-win-2019"},
ImageBase: ImageBase{OverWrite: true,
Resource: Resource{Project: "a_project", NoCleanup: true, ExactName: true},
},
GuestOsFeatures: []string{"VIRTIO_SCSI_MULTIQUEUE", "WINDOWS", "MULTI_IP_SUBNET"},
}},
ImagesBeta: []*ImageBeta{{
Image: computeBeta.Image{Name: "image-from-disk", SourceDisk: "image", Description: "Microsoft, SQL Server 2016 Web, on Windows Server 2019", Family: "sql-web-2016-win-2019"},
ImageBase: ImageBase{OverWrite: true,
Resource: Resource{Project: "a_project", NoCleanup: true, ExactName: true},
},
GuestOsFeatures: []string{"VIRTIO_SCSI_MULTIQUEUE", "WINDOWS", "MULTI_IP_SUBNET"},
}},
},
},
"create-image-guest-os-features-compute-api": {
name: "create-image-guest-os-features-compute-api",
CreateImages: &CreateImages{
Images: []*Image{{
Image: compute.Image{Name: "image-from-disk", SourceDisk: "image", Description: "GuestOS Features Compute API", Family: "guest-os"},
ImageBase: ImageBase{OverWrite: true,
Resource: Resource{Project: "a_project", NoCleanup: true, ExactName: true},
},
GuestOsFeatures: []string{"VIRTIO_SCSI_MULTIQUEUE", "WINDOWS", "MULTI_IP_SUBNET"},
}},
ImagesAlpha: []*ImageAlpha{{
Image: computeAlpha.Image{Name: "image-from-disk", SourceDisk: "image", Description: "GuestOS Features Compute API", Family: "guest-os"},
ImageBase: ImageBase{OverWrite: true,
Resource: Resource{Project: "a_project", NoCleanup: true, ExactName: true},
},
GuestOsFeatures: []string{"VIRTIO_SCSI_MULTIQUEUE", "WINDOWS", "MULTI_IP_SUBNET"},
}},
ImagesBeta: []*ImageBeta{{
Image: computeBeta.Image{Name: "image-from-disk", SourceDisk: "image", Description: "GuestOS Features Compute API", Family: "guest-os"},
ImageBase: ImageBase{OverWrite: true,
Resource: Resource{Project: "a_project", NoCleanup: true, ExactName: true},
},
GuestOsFeatures: []string{"VIRTIO_SCSI_MULTIQUEUE", "WINDOWS", "MULTI_IP_SUBNET"},
}},
},
},
"create-machine-image": {
name: "create-machine-image",
CreateMachineImages: &CreateMachineImages{
{MachineImage: computeBeta.MachineImage{
Name: "machine-image-from-instance",
SourceInstance: "source-instance",
StorageLocations: []string{"eu", "us-west2"},
}},
},
},
}
want.Dependencies = map[string][]string{
"create-disks": {},
"bootstrap": {"create-disks"},
"bootstrap-stopped": {"bootstrap"},
"postinstall": {"bootstrap-stopped"},
"postinstall-stopped": {"postinstall"},
"create-image-locality": {"postinstall-stopped"},
"create-image": {"create-image-locality"},
"create-machine-image": {"create-image"},
}
for _, s := range want.Steps {
s.w = want
}
if diffRes := diff(got, want, 0); diffRes != "" {
t.Errorf("parsed workflow does not match expectation: (-got +want)\n%s", diffRes)
}
}
func TestNewStep(t *testing.T) {
w := &Workflow{}
s, err := w.NewStep("s")
wantS := &Step{name: "s", w: w}
if s == nil || s.name != "s" || s.w != w {
t.Errorf("step does not meet expectation: got: %v, want: %v", s, wantS)
}
if err != nil {
t.Error("unexpected error when creating new step")
}
s, err = w.NewStep("s")
if s != nil {
t.Errorf("step should not have been created: %v", s)
}
if err == nil {
t.Error("should have erred, but didn't")
}
}
func TestNewFromFile_SupportsNestedVariables(t *testing.T) {
cases := []struct {
name string
template string
}{
{"Variable in filename", "./test_data/TestNewFromFile_SupportsNestedVariables_VarInFilename.parent.wf.json"},
{"No variable in filename", "./test_data/TestNewFromFile_SupportsNestedVariables.parent.wf.json"}}
for _, tt := range cases {
t.Run(tt.name, func(t *testing.T) {
ctx := context.Background()
client, err := newTestGCSClient()
if err != nil {
t.Fatal(err)
}
td, err := ioutil.TempDir(os.TempDir(), "")
if err != nil {
t.Fatalf("error creating temp dir: %v", err)
}
defer os.RemoveAll(td)
tf := filepath.Join(td, "test.cred")
if err := ioutil.WriteFile(tf, []byte(`{ "type": "service_account" }`), 0600); err != nil {
t.Fatalf("error creating temp file: %v", err)
}
wf, err := NewFromFile(tt.template)
if err != nil {
t.Fatal(err)
}
wf.Zone = "wf-zone"
wf.Project = "bar-project"
wf.OAuthPath = tf
wf.Logger = &MockLogger{}
wf.StorageClient = client
wf.externalLogging = true
err = wf.populate(ctx)
if err != nil {
t.Fatal(err)
}
child := wf.Steps["include-workflow"].IncludeWorkflow
assert.Equal(t, "v1", child.Vars["k1"])
assert.Equal(t, "include-workflow-image-v1", (*child.Workflow.Steps["create-disks"].CreateDisks)[0].SourceImage)
})
}
}
func TestPopulate(t *testing.T) {
ctx := context.Background()
client, err := newTestGCSClient()
if err != nil {
t.Fatal(err)
}
td, err := ioutil.TempDir(os.TempDir(), "")
if err != nil {
t.Fatalf("error creating temp dir: %v", err)
}
defer os.RemoveAll(td)
tf := filepath.Join(td, "test.cred")
if err := ioutil.WriteFile(tf, []byte(`{ "type": "service_account" }`), 0600); err != nil {
t.Fatalf("error creating temp file: %v", err)
}
called := false
var stepPopErr DError
stepPop := func(ctx context.Context, s *Step) DError {
called = true
return stepPopErr
}
got := New()
got.Name = "${wf_name}"
got.Zone = "wf-zone"
got.Project = "bar-project"
got.OAuthPath = tf
got.Logger = &MockLogger{}
got.Vars = map[string]Var{
"bucket": {Value: "wf-bucket", Required: true},
"step_name": {Value: "step1"},
"timeout": {Value: "60m"},
"path": {Value: "./test_sub.wf.json"},
"wf_name": {Value: "wf-name"},
"test-var": {Value: "${ZONE}-this-should-populate-${NAME}"},
}
got.Steps = map[string]*Step{
"${NAME}-${step_name}": {
w: got,
Timeout: "${timeout}",
testType: &mockStep{
populateImpl: stepPop,
},
},
}
got.StorageClient = client
got.externalLogging = true
if err := got.populate(ctx); err != nil {
t.Fatalf("error populating workflow: %v", err)
}
want := New()
// These are difficult to validate and irrelevant, so we cheat.
want.id = got.id
want.Cancel = got.Cancel
want.cleanupHooks = got.cleanupHooks
want.StorageClient = got.StorageClient
want.cloudLoggingClient = got.cloudLoggingClient
want.Logger = got.Logger
want.disks = newDiskRegistry(want)
want.images = newImageRegistry(want)
want.machineImages = newMachineImageRegistry(want)
want.instances = newInstanceRegistry(want)
want.networks = newNetworkRegistry(want)
want.Name = "wf-name"
want.GCSPath = "gs://bar-project-daisy-bkt"
want.Zone = "wf-zone"
want.Project = "bar-project"
want.OAuthPath = tf
want.externalLogging = true
want.Sources = map[string]string{}
want.DefaultTimeout = defaultTimeout
want.defaultTimeout = 10 * time.Minute
want.Vars = map[string]Var{
"bucket": {Value: "wf-bucket", Required: true},
"step_name": {Value: "step1"},
"timeout": {Value: "60m"},
"path": {Value: "./test_sub.wf.json"},
"wf_name": {Value: "wf-name"},
"test-var": {Value: "wf-zone-this-should-populate-wf-name"},
}
want.autovars = got.autovars
want.bucket = "bar-project-daisy-bkt"
want.scratchPath = got.scratchPath
want.sourcesPath = fmt.Sprintf("%s/sources", got.scratchPath)
want.logsPath = fmt.Sprintf("%s/logs", got.scratchPath)
want.outsPath = fmt.Sprintf("%s/outs", got.scratchPath)
want.username = got.username
want.Steps = map[string]*Step{
"wf-name-step1": {
name: "wf-name-step1",
Timeout: "60m",
timeout: time.Duration(60 * time.Minute),
testType: &mockStep{
populateImpl: stepPop,
},
},
}
want.Dependencies = map[string][]string{}
for _, s := range want.Steps {
s.w = want
}
if diffRes := diff(got, want, 0); diffRes != "" {
t.Errorf("parsed workflow does not match expectation: (-got +want)\n%s", diffRes)
}
if !called {
t.Error("did not call step's populate")
}
stepPopErr = Errf("error")
wantErr := Errf("error populating step \"wf-name-step1\": %v", stepPopErr)
if err := got.populate(ctx); err.Error() != wantErr.Error() {
t.Errorf("did not get proper step populate error: %v != %v", err, wantErr)
}
}
func TestRequiredVars(t *testing.T) {
w := testWorkflow()
tests := []struct {
desc string
vars map[string]Var
shouldErr bool
}{
{"normal case", map[string]Var{"foo": {Value: "foo", Required: true, Description: "foo"}}, false},
{"missing req case", map[string]Var{"foo": {Value: "", Required: true, Description: "foo"}}, true},
}
for _, tt := range tests {
w.Vars = tt.vars
err := w.populate(context.Background())
if tt.shouldErr && err == nil {
t.Errorf("%s: should have erred, but didn't", tt.desc)
} else if !tt.shouldErr && err != nil {
t.Errorf("%s: unexpected error: %v", tt.desc, err)
}
}
}
func testTraverseWorkflow(mockRun func(i int) func(context.Context, *Step) DError) *Workflow {
// s0---->s1---->s3
// \ /
// --->s2---
// s4
w := testWorkflow()
w.Steps = map[string]*Step{
"s0": {name: "s0", testType: &mockStep{runImpl: mockRun(0)}, w: w},
"s1": {name: "s1", testType: &mockStep{runImpl: mockRun(1)}, w: w},
"s2": {name: "s2", testType: &mockStep{runImpl: mockRun(2)}, w: w},
"s3": {name: "s3", testType: &mockStep{runImpl: mockRun(3)}, w: w},
"s4": {name: "s4", testType: &mockStep{runImpl: mockRun(4)}, w: w},
}
w.Dependencies = map[string][]string{
"s1": {"s0"},
"s2": {"s0"},
"s3": {"s1", "s2"},
}
return w
}
func TestTraverseDAG(t *testing.T) {
ctx := context.Background()
var callOrder []int
errs := make([]DError, 5)
var rw sync.Mutex
mockRun := func(i int) func(context.Context, *Step) DError {
return func(_ context.Context, _ *Step) DError {
rw.Lock()
defer rw.Unlock()
callOrder = append(callOrder, i)
return errs[i]
}
}
// Check call order: s1 and s2 must be after s0, s3 must be after s1 and s2.
checkCallOrder := func() error {
rw.Lock()
defer rw.Unlock()
stepOrderNum := []int{-1, -1, -1, -1, -1}
for i, stepNum := range callOrder {
stepOrderNum[stepNum] = i
}
// If s1 was called, check it was called after s0.
if stepOrderNum[1] != -1 && stepOrderNum[1] < stepOrderNum[0] {
return errors.New("s1 was called before s0")
}
// If s2 was called, check it was called after s0.
if stepOrderNum[2] != -1 && stepOrderNum[2] < stepOrderNum[0] {
return errors.New("s2 was called before s0")
}
// If s3 was called, check it was called after s1 and s2.
if stepOrderNum[3] != -1 {
if stepOrderNum[3] < stepOrderNum[1] {
return errors.New("s3 was called before s1")
}
if stepOrderNum[3] < stepOrderNum[2] {
return errors.New("s3 was called before s2")
}
}
return nil
}
// Normal, good run.
w := testTraverseWorkflow(mockRun)
if err := w.Run(ctx); err != nil {
t.Errorf("unexpected error: %s", err)
}
if err := checkCallOrder(); err != nil {
t.Errorf("call order error: %s", err)
}
callOrder = []int{}
errs = make([]DError, 5)
// s2 failure.
w = testTraverseWorkflow(mockRun)
errs[2] = Errf("failure")
want := w.Steps["s2"].wrapRunError(errs[2])
if err := w.Run(ctx); err.Error() != want.Error() {
t.Errorf("unexpected error: %s != %s", err, want)
}
if err := checkCallOrder(); err != nil {
t.Errorf("call order error: %s", err)
}
}
func TestForceCleanupSetOnRunError(t *testing.T) {
doTestForceCleanup(t, true, true, true)
}
func TestForceCleanupNotSetOnRunErrorWhenForceCleanupFalse(t *testing.T) {
doTestForceCleanup(t, true, false, false)
}
func TestForceCleanupNotSetOnNoErrorWhenForceCleanupTrue(t *testing.T) {
doTestForceCleanup(t, false, true, false)
}
func TestForceCleanupNotSetOnNoErrorWhenForceCleanupFalse(t *testing.T) {
doTestForceCleanup(t, false, false, false)
}
func doTestForceCleanup(t *testing.T, runErrorFromStep bool, forceCleanupOnError bool, forceCleanup bool) {
mockRun := func(i int) func(context.Context, *Step) DError {
return func(_ context.Context, _ *Step) DError {
if runErrorFromStep {
return Errf("failure")
}
return nil
}
}
ctx := context.Background()
w := testWorkflow()
w.ForceCleanupOnError = forceCleanupOnError
w.Steps = map[string]*Step{
"s0": {name: "s0", testType: &mockStep{runImpl: mockRun(0)}, w: w},
}
if err := w.Run(ctx); (err != nil) != runErrorFromStep {
if runErrorFromStep {
t.Errorf("expected error from w.Run but nil received")
} else {
t.Errorf("expected no error from w.Run but %v received", err)
}
}
if w.forceCleanup != forceCleanup {
t.Errorf("w.forceCleanup should be set to %v but is %v", forceCleanup, w.forceCleanup)
}
}
func TestPrint(t *testing.T) {
data := []byte(`{
"Name": "some-name",
"Project": "some-project",
"Zone": "some-zone",
"GCSPath": "gs://some-bucket/images",
"Vars": {
"instance_name": "i1",
"machine_type": {"Value": "n1-standard-1", "Required": true}
},
"Steps": {
"${instance_name}Delete": {
"DeleteResources": {
"Instances": ["${instance_name}"]
}
}
}
}`)
want := `{
"Name": "some-name",
"Project": "some-project",
"Zone": "some-zone",
"GCSPath": "gs://some-bucket/images",
"Vars": {
"instance_name": {
"Value": "i1"
},
"machine_type": {
"Value": "n1-standard-1",
"Required": true
}
},
"Steps": {
"i1Delete": {
"Timeout": "10m",
"DeleteResources": {
"Instances": [
"i1"
]
}
}
},
"DefaultTimeout": "10m",
"ForceCleanupOnError": false
}
`
td, err := ioutil.TempDir(os.TempDir(), "")
if err != nil {
t.Fatalf("error creating temp dir: %v", err)
}
defer os.RemoveAll(td)
tf := filepath.Join(td, "test.wf.json")
ioutil.WriteFile(tf, data, 0600)
got, err := NewFromFile(tf)
if err != nil {
t.Fatal(err)
}
got.ComputeClient, _ = newTestGCEClient()
got.StorageClient, _ = newTestGCSClient()
old := os.Stdout
r, w, err := os.Pipe()
if err != nil {
t.Fatal(err)
}
os.Stdout = w
got.Print(context.Background())
w.Close()
os.Stdout = old
var buf bytes.Buffer
if _, err := io.Copy(&buf, r); err != nil {
t.Fatal(err)
}
if diffRes := diff(buf.String(), want, 0); diffRes != "" {
t.Errorf("printed workflow does not match expectation: (-got +want)\n%s", diffRes)
}
}
func testValidateErrors(w *Workflow, want string) error {
if err := w.Validate(context.Background()); err == nil {
return errors.New("expected error, got nil")
} else if err.Error() != want {
return fmt.Errorf("did not get expected error from Validate():\ngot: %q\nwant: %q", err.Error(), want)
}
select {
case <-w.Cancel:
return nil
default:
return errors.New("expected cancel to be closed after error")
}
}
func TestValidateErrors(t *testing.T) {
// Error from validateRequiredFields().
w := testWorkflow()
w.Name = "1"
want := "error validating workflow: workflow field 'Name' must start with a letter and only contain letters, numbers, and hyphens"
if err := testValidateErrors(w, want); err != nil {
t.Error(err)
}
// Error from populate().
w = testWorkflow()
w.Steps = map[string]*Step{"s0": {Timeout: "10", testType: &mockStep{}}}
want = "error populating workflow: error populating step \"s0\": time: missing unit in duration \"10\""
if err := testValidateErrors(w, want); err != nil {
t.Error(err)
}
// Error from validate().
w = testWorkflow()
w.Steps = map[string]*Step{"s0": {testType: &mockStep{}}}
w.Project = "foo"
want = "error validating workflow: bad project lookup: \"foo\", error: APIError: bad project"
if err := testValidateErrors(w, want); err != nil {
t.Error(err)
}
}
func TestWrite(t *testing.T) {
var buf bytes.Buffer
testBucket := "bucket"
testObject := "object"
var gotObj string
var gotBkt string
nameRgx := regexp.MustCompile(`"name":"([^"].*)"`)
uploadRgx := regexp.MustCompile(`/b/([^/]+)/o?.*uploadType=multipart.*`)
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
u := r.URL.String()
m := r.Method
if match := uploadRgx.FindStringSubmatch(u); m == "POST" && match != nil {
body, _ := ioutil.ReadAll(r.Body)
buf.Write(body)
gotObj = nameRgx.FindStringSubmatch(string(body))[1]
gotBkt = match[1]
fmt.Fprintf(w, `{"kind":"storage#object","bucket":"%s","name":"%s"}`, gotBkt, gotObj)
}
}))
gcsClient, err := storage.NewClient(context.Background(), option.WithEndpoint(ts.URL), option.WithHTTPClient(http.DefaultClient))
if err != nil {
t.Fatal(err)
}
l := GCSLogger{
client: gcsClient,
bucket: testBucket,
object: testObject,
ctx: context.Background(),
}
tests := []struct {
test, want string
}{
{"test log 1\n", "test log 1\n"},
{"test log 2\n", "test log 1\ntest log 2\n"},
}
for _, tt := range tests {
l.Write([]byte(tt.test))
if gotObj != testObject {
t.Errorf("object does not match, want: %q, got: %q", testObject, gotObj)
}
if gotBkt != testBucket {
t.Errorf("bucket does not match, want: %q, got: %q", testBucket, gotBkt)
}
if !strings.Contains(buf.String(), tt.want) {
t.Errorf("expected text did not get sent to GCS, want: %q, got: %q", tt.want, buf.String())
}
if l.buf.String() != tt.want {
t.Errorf("buffer does mot match expectation, want: %q, got: %q", tt.want, l.buf.String())
}
}
}
func TestRunStepTimeout(t *testing.T) {
w := testWorkflow()
s, _ := w.NewStep("test")
s.timeout = 1 * time.Nanosecond
s.testType = &mockStep{runImpl: func(ctx context.Context, s *Step) DError {
time.Sleep(1 * time.Second)
return nil
}}
want := `step "test" did not complete within the specified timeout of 1ns`
if err := w.runStep(context.Background(), s); err == nil || err.Error() != want {
t.Errorf("did not get expected error, got: %q, want: %q", err.Error(), want)
}
}
func TestPopulateClients(t *testing.T) {
w := testWorkflow()
initialComputeClient := w.ComputeClient
tryPopulateClients(t, w)
if w.ComputeClient != initialComputeClient {
t.Errorf("Should not repopulate compute client.")
}
w.ComputeClient = nil
tryPopulateClients(t, w)
if w.ComputeClient == nil {
t.Errorf("Did not populate compute client.")
}
initialStorageClient := w.StorageClient
tryPopulateClients(t, w)
if w.StorageClient != initialStorageClient {
t.Errorf("Should not repopulate storage client.")
}
w.StorageClient = nil
tryPopulateClients(t, w)
if w.StorageClient == nil {
t.Errorf("Did not populate storage client.")
}
initialCloudLoggingClient := w.cloudLoggingClient
tryPopulateClients(t, w)
if w.cloudLoggingClient != initialCloudLoggingClient {
t.Errorf("Should not repopulate logging client.")
}
w.cloudLoggingClient = nil
w.externalLogging = false
tryPopulateClients(t, w)
if w.cloudLoggingClient != nil {
t.Errorf("Should not populate Cloud Logging client.")
}
w.cloudLoggingClient = nil
w.externalLogging = true
tryPopulateClients(t, w)
if w.cloudLoggingClient == nil {
t.Errorf("Did not populate Cloud Logging client.")
}
w.ComputeClient = nil
tryPopulateClients(t, w, option.WithEndpoint("test.com"))
if w.ComputeClient.BasePath() != "test.com" {
t.Errorf("Did not accept custom options.")
}
}
func tryPopulateClients(t *testing.T, w *Workflow, options ...option.ClientOption) {
if err := w.PopulateClients(context.Background(), options...); err != nil {
t.Errorf("Failed to populate clients for workflow: %v", err)
}
}
func TestCancelReasonEmptySingleWorkflow(t *testing.T) {
w1 := testWorkflow()
assertWorkflowCancelReason(t, w1, "")
}
func TestCancelReasonProvidedSingleWorkflow(t *testing.T) {
w1 := testWorkflow()
w1.cancelReason = "w1 cr"
assertWorkflowCancelReason(t, w1, "w1 cr")
}
func TestCancelReasonChild(t *testing.T) {
w1 := testWorkflow()
w2 := testWorkflow()
w2.parent = w1
w1.cancelReason = "w1 cr"
w2.cancelReason = "w2 cr"
assertWorkflowCancelReason(t, w1, "w1 cr")
assertWorkflowCancelReason(t, w2, "w2 cr")
}
func TestCancelReasonInheritedFromParent(t *testing.T) {
w1 := testWorkflow()
w2 := testWorkflow()
w2.parent = w1
w1.cancelReason = "w1 cr"
assertWorkflowCancelReason(t, w1, "w1 cr")
assertWorkflowCancelReason(t, w2, "w1 cr")
}
func TestCancelReasonInheritedFromGrandParent(t *testing.T) {
w1 := testWorkflow()
w2 := testWorkflow()
w3 := testWorkflow()
w2.parent = w1
w3.parent = w2
w1.cancelReason = "w1 cr"
assertWorkflowCancelReason(t, w1, "w1 cr")
assertWorkflowCancelReason(t, w2, "w1 cr")
assertWorkflowCancelReason(t, w3, "w1 cr")
}
func TestCancelReasonInheritedFromParentWhenGrandchild(t *testing.T) {
w1 := testWorkflow()
w2 := testWorkflow()
w3 := testWorkflow()
w2.parent = w1
w3.parent = w2
w2.cancelReason = "w2 cr"
assertWorkflowCancelReason(t, w1, "")
assertWorkflowCancelReason(t, w2, "w2 cr")
assertWorkflowCancelReason(t, w3, "w2 cr")
}
func assertWorkflowCancelReason(t *testing.T, w *Workflow, expected string) {
if cr := w.getCancelReason(); cr != expected {
t.Errorf("Expected cancel reason `%v` but got `%v` ", expected, cr)
}
}
func TestOnStepCancelDefaultCancelReason(t *testing.T) {
w := testWorkflow()
s := &Step{name: "s", w: w}
err := w.onStepCancel(s, "Dummy")
expectedErrorMessage := "Step \"s\" (Dummy) is canceled."
if err.Error() != expectedErrorMessage {
t.Errorf("Expected error message `%v` but got `%v` ", expectedErrorMessage, err.Error())
}
}
func TestOnStepCancelCustomCancelReason(t *testing.T) {
w := testWorkflow()
w.cancelReason = "failed horribly"
s := &Step{name: "s", w: w}
err := w.onStepCancel(s, "Dummy")
expectedErrorMessage := "Step \"s\" (Dummy) failed horribly."
if err.Error() != expectedErrorMessage {
t.Errorf("Expected error message `%v` but got `%v` ", expectedErrorMessage, err.Error())
}
}
|
{
tests := []struct{ name, wfName, wfID, want string }{
{"name", "wfname", "123456789", "name-wfname-123456789"},
{"super-long-name-really-long", "super-long-workflow-name-like-really-really-long", "1", "super-long-name-really-long-super-long-workflow-name-lik-1"},
{"super-long-name-really-long", "super-long-workflow-name-like-really-really-long", "123456789", "super-long-name-really-long-super-long-workflow-name-lik-123456"},
}
w := &Workflow{}
for _, tt := range tests {
w.id = tt.wfID
w.Name = tt.wfName
result := w.genName(tt.name)
if result != tt.want {
t.Errorf("bad result, i: name=%s wfName=%s wfId=%s; got: %s; want: %s", tt.name, tt.wfName, tt.wfID, result, tt.want)
}
if len(result) > 64 {
t.Errorf("result > 64 characters, i: name=%s wfName=%s wfId=%s; got: %s", tt.name, tt.wfName, tt.wfID, result)
}
}
}
|
app.component.spec.ts
|
import { TestBed } from '@angular/core/testing';
import { RouterTestingModule } from '@angular/router/testing';
import { AppComponent } from './app.component';
describe('AppComponent', () => {
beforeEach(async () => {
await TestBed.configureTestingModule({
imports: [
RouterTestingModule
],
declarations: [
AppComponent
],
}).compileComponents();
});
|
});
it(`should have as title 'zitadel-angular-template'`, () => {
const fixture = TestBed.createComponent(AppComponent);
const app = fixture.componentInstance;
expect(app.title).toEqual('zitadel-angular-template');
});
it('should render title', () => {
const fixture = TestBed.createComponent(AppComponent);
fixture.detectChanges();
const compiled = fixture.nativeElement;
expect(compiled.querySelector('.content span').textContent).toContain('zitadel-angular-template app is running!');
});
});
|
it('should create the app', () => {
const fixture = TestBed.createComponent(AppComponent);
const app = fixture.componentInstance;
expect(app).toBeTruthy();
|
tiny_bls_sig.go
|
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package bls_sig
import (
"crypto/sha256"
"fmt"
"math/big"
bls12381 "github.com/dB2510/kryptology/pkg/core/curves/native/bls12-381"
"github.com/dB2510/kryptology/pkg/signatures/bls/finitefield"
)
// Implement BLS signatures on the BLS12-381 curve
// according to https://crypto.standford.edu/~dabo/pubs/papers/BLSmultisig.html
// and https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-03
// this file implements signatures in G1 and public keys in G2.
// Public Keys and Signatures can be aggregated but the consumer
// must use proofs of possession to defend against rogue-key attacks.
const (
// Public key size in G2
PublicKeyVtSize = 96
// Signature size in G1
SignatureVtSize = 48
// Proof of Possession in G1
ProofOfPossessionVtSize = 48
)
// Represents a public key in G2
type PublicKeyVt struct {
value bls12381.PointG2
}
// Serialize a public key to a byte array in compressed form.
// See
// https://github.com/zcash/librustzcash/blob/master/pairing/src/bls12_381/README.md#serialization
// https://docs.rs/bls12_381/0.1.1/bls12_381/notes/serialization/index.html
func (pk *PublicKeyVt) MarshalBinary() ([]byte, error) {
return blsEngine.G2.ToCompressed(&pk.value), nil
}
// Deserialize a public key from a byte array in compressed form.
// See
// https://github.com/zcash/librustzcash/blob/master/pairing/src/bls12_381/README.md#serialization
// https://docs.rs/bls12_381/0.1.1/bls12_381/notes/serialization/index.html
// If successful, it will assign the public key
// otherwise it will return an error
func (pk *PublicKeyVt) UnmarshalBinary(data []byte) error {
if len(data) != PublicKeyVtSize {
return fmt.Errorf("public key must be %d bytes", PublicKeySize)
}
p2, err := blsEngine.G2.FromCompressed(data)
if err != nil {
return err
}
if blsEngine.G2.IsZero(p2) {
return fmt.Errorf("public keys cannot be zero")
}
pk.value = *p2
return nil
}
// Represents a BLS signature in G1
type SignatureVt struct {
value bls12381.PointG1
}
// Serialize a signature to a byte array in compressed form.
// See
// https://github.com/zcash/librustzcash/blob/master/pairing/src/bls12_381/README.md#serialization
// https://docs.rs/bls12_381/0.1.1/bls12_381/notes/serialization/index.html
func (sig *SignatureVt) MarshalBinary() ([]byte, error) {
return blsEngine.G1.ToCompressed(&sig.value), nil
}
func (sig *SignatureVt) verify(pk *PublicKeyVt, message []byte, signDstVt string) (bool, error) {
return pk.verifySignatureVt(message, sig, signDstVt)
}
// The AggregateVerify algorithm checks an aggregated signature over
// several (PK, message) pairs.
// The Signature is the output of aggregateSignaturesVt
// Each message must be different or this will return false.
// See section 3.1.1 from
// https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-03
func (sig *SignatureVt) aggregateVerify(pks []*PublicKeyVt, msgs [][]byte, signDstVt string) (bool, error) {
return sig.coreAggregateVerify(pks, msgs, signDstVt)
}
func (sig *SignatureVt) coreAggregateVerify(pks []*PublicKeyVt, msgs [][]byte, signDstVt string) (bool, error) {
if len(pks) < 1 {
return false, fmt.Errorf("at least one key is required")
}
if len(msgs) < 1 {
return false, fmt.Errorf("at least one message is required")
}
if len(pks) != len(msgs) {
return false, fmt.Errorf("the number of public keys does not match the number of messages: %v != %v", len(pks), len(msgs))
}
if !blsEngine.G1.InCorrectSubgroup(&sig.value) {
return false, fmt.Errorf("signature is not in the correct subgroup")
}
engine := bls12381.NewEngine()
dst := []byte(signDstVt)
// e(H(m_1), pk_1)*...*e(H(m_N), pk_N) == e(s, g2)
// However, we use only one miller loop
// by doing the equivalent of
// e(H(m_1), pk_1)*...*e(H(m_N), pk_N) * e(s^-1, g2) == 1
for i, pk := range pks {
if pk == nil {
return false, fmt.Errorf("public key at %d is nil", i)
}
if engine.G2.IsZero(&pk.value) || !engine.G2.InCorrectSubgroup(&pk.value) {
return false, fmt.Errorf("public key at %d is not in the correct subgroup", i)
}
p1, err := engine.G1.HashToCurve(sha256.New, msgs[i], dst)
if err != nil {
return false, err
}
engine.AddPair(p1, &pk.value)
}
g2 := engine.G2.One()
engine.G2.Neg(g2, g2)
engine.AddPair(&sig.value, g2)
return engine.Check()
}
// Deserialize a signature from a byte array in compressed form.
// See
// https://github.com/zcash/librustzcash/blob/master/pairing/src/bls12_381/README.md#serialization
// https://docs.rs/bls12_381/0.1.1/bls12_381/notes/serialization/index.html
// If successful, it will assign the Signature
// otherwise it will return an error
func (sig *SignatureVt) UnmarshalBinary(data []byte) error {
if len(data) != SignatureVtSize {
return fmt.Errorf("signature must be %d bytes", SignatureSize)
}
p1, err := blsEngine.G1.FromCompressed(data)
if err != nil {
return err
}
if blsEngine.G1.IsZero(p1) {
return fmt.Errorf("signatures cannot be zero")
}
sig.value = *p1
return nil
}
// Get the corresponding public key from a secret key
// Verifies the public key is in the correct subgroup
func (sk *SecretKey) GetPublicKeyVt() (*PublicKeyVt, error) {
result := blsEngine.G2.New()
blsEngine.G2.MulScalar(result, blsEngine.G2.One(), &sk.value)
if !blsEngine.G2.InCorrectSubgroup(result) || blsEngine.G2.IsZero(result) {
return nil, fmt.Errorf("point is not in correct subgroup")
}
return &PublicKeyVt{value: *result}, nil
}
// Compute a signature from a secret key and message
// This signature is deterministic which protects against
// attacks arising from signing with bad randomness like
// the nonce reuse attack on ECDSA. `message` is
// hashed to a point in G1 as described in to
// https://datatracker.ietf.org/doc/draft-irtf-cfrg-hash-to-curve/?include_text=1
// See Section 2.6 in https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-03
// nil message is not permitted but empty slice is allowed
func (sk *SecretKey) createSignatureVt(message []byte, dstVt string) (*SignatureVt, error) {
if message == nil {
return nil, fmt.Errorf("message cannot be nil")
}
if sk.value.Cmp(big.NewInt(0)) == 0 {
return nil, fmt.Errorf("invalid secret key")
}
p1, err := blsEngine.G1.HashToCurve(sha256.New, message, []byte(dstVt))
if err != nil {
return nil, err
}
result := blsEngine.G1.New()
blsEngine.G1.MulScalar(result, p1, &sk.value)
if !blsEngine.G1.InCorrectSubgroup(result) {
return nil, fmt.Errorf("point is not in correct subgroup")
}
return &SignatureVt{value: *result}, nil
}
// Verify a signature is valid for the message under this public key.
// See Section 2.7 in https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-03
func (pk PublicKeyVt) verifySignatureVt(message []byte, signature *SignatureVt, dstVt string) (bool, error) {
if signature == nil || message == nil || blsEngine.G2.IsZero(&pk.value) {
return false, fmt.Errorf("signature and message and public key cannot be nil or zero")
}
if blsEngine.G1.IsZero(&signature.value) || !blsEngine.G1.InCorrectSubgroup(&signature.value) {
return false, fmt.Errorf("signature is not in the correct subgroup")
}
engine := bls12381.NewEngine()
p1, err := engine.G1.HashToCurve(sha256.New, message, []byte(dstVt))
if err != nil {
return false, err
}
// e(H(m), pk) == e(s, g2)
// However, we can reduce the number of miller loops
// by doing the equivalent of
// e(H(m)^-1, pk) * e(s, g2) == 1
engine.AddPairInv(p1, &pk.value)
engine.AddPair(&signature.value, engine.G2.One())
return engine.Check()
}
// Combine public keys into one aggregated key
func aggregatePublicKeysVt(pks ...*PublicKeyVt) (*PublicKeyVt, error) {
if len(pks) < 1 {
return nil, fmt.Errorf("at least one public key is required")
}
result := blsEngine.G2.New()
for i, k := range pks {
if k == nil {
return nil, fmt.Errorf("key at %d is nil, keys cannot be nil", i)
}
if !blsEngine.G2.InCorrectSubgroup(&k.value) {
return nil, fmt.Errorf("key at %d is not in the correct subgroup", i)
}
blsEngine.G2.Add(result, result, &k.value)
}
return &PublicKeyVt{value: *result}, nil
}
// Combine signatures into one aggregated signature
func aggregateSignaturesVt(sigs ...*SignatureVt) (*SignatureVt, error) {
if len(sigs) < 1 {
return nil, fmt.Errorf("at least one signature is required")
}
result := blsEngine.G1.New()
for i, s := range sigs {
if s == nil {
return nil, fmt.Errorf("signature at %d is nil, signature cannot be nil", i)
}
if !blsEngine.G1.InCorrectSubgroup(&s.value) {
return nil, fmt.Errorf("signature at %d is not in the correct subgroup", i)
}
blsEngine.G1.Add(result, result, &s.value)
}
return &SignatureVt{value: *result}, nil
}
// A proof of possession scheme uses a separate public key validation
// step, called a proof of possession, to defend against rogue key
// attacks. This enables an optimization to aggregate signature
// verification for the case that all signatures are on the same
// message.
type ProofOfPossessionVt struct {
value bls12381.PointG1
}
// Generates a proof-of-possession (PoP) for this secret key. The PoP signature should be verified before
// before accepting any aggregate signatures related to the corresponding pubkey.
func (sk *SecretKey) createProofOfPossessionVt(popDstVt string) (*ProofOfPossessionVt, error) {
pk, err := sk.GetPublicKeyVt()
if err != nil {
|
return nil, err
}
msg, err := pk.MarshalBinary()
if err != nil {
return nil, err
}
sig, err := sk.createSignatureVt(msg, popDstVt)
if err != nil {
return nil, err
}
return &ProofOfPossessionVt{value: sig.value}, nil
}
// Serialize a proof of possession to a byte array in compressed form.
// See
// https://github.com/zcash/librustzcash/blob/master/pairing/src/bls12_381/README.md#serialization
// https://docs.rs/bls12_381/0.1.1/bls12_381/notes/serialization/index.html
func (pop *ProofOfPossessionVt) MarshalBinary() ([]byte, error) {
return blsEngine.G1.ToCompressed(&pop.value), nil
}
// Deserialize a proof of possession from a byte array in compressed form.
// See
// https://github.com/zcash/librustzcash/blob/master/pairing/src/bls12_381/README.md#serialization
// https://docs.rs/bls12_381/0.1.1/bls12_381/notes/serialization/index.html
// If successful, it will assign the Signature
// otherwise it will return an error
func (pop *ProofOfPossessionVt) UnmarshalBinary(data []byte) error {
p1 := new(SignatureVt)
err := p1.UnmarshalBinary(data)
if err != nil {
return err
}
pop.value = p1.value
return nil
}
// Verifies that PoP is valid for this pubkey. In order to prevent rogue key attacks, a PoP must be validated
// for each pubkey in an aggregated signature.
func (pop *ProofOfPossessionVt) verify(pk *PublicKeyVt, popDstVt string) (bool, error) {
if pk == nil {
return false, fmt.Errorf("public key cannot be nil")
}
msg, err := pk.MarshalBinary()
if err != nil {
return false, err
}
return pk.verifySignatureVt(msg, &SignatureVt{value: pop.value}, popDstVt)
}
// Represents an MultiSignature in G1. A multisignature is used when multiple signatures
// are calculated over the same message vs an aggregate signature where each message signed
// is a unique.
type MultiSignatureVt struct {
value bls12381.PointG1
}
// Serialize a multi-signature to a byte array in compressed form.
// See
// https://github.com/zcash/librustzcash/blob/master/pairing/src/bls12_381/README.md#serialization
// https://docs.rs/bls12_381/0.1.1/bls12_381/notes/serialization/index.html
func (sig *MultiSignatureVt) MarshalBinary() ([]byte, error) {
return blsEngine.G1.ToCompressed(&sig.value), nil
}
// Check a multisignature is valid for a multipublickey and a message
func (sig *MultiSignatureVt) verify(pk *MultiPublicKeyVt, message []byte, signDstVt string) (bool, error) {
if pk == nil {
return false, fmt.Errorf("public key cannot be nil")
}
p := &PublicKeyVt{value: pk.value}
return p.verifySignatureVt(message, &SignatureVt{value: sig.value}, signDstVt)
}
// Deserialize a signature from a byte array in compressed form.
// See
// https://github.com/zcash/librustzcash/blob/master/pairing/src/bls12_381/README.md#serialization
// https://docs.rs/bls12_381/0.1.1/bls12_381/notes/serialization/index.html
// If successful, it will assign the Signature
// otherwise it will return an error
func (sig *MultiSignatureVt) UnmarshalBinary(data []byte) error {
if len(data) != SignatureVtSize {
return fmt.Errorf("multi signature must be %v bytes", SignatureSize)
}
s1 := new(SignatureVt)
err := s1.UnmarshalBinary(data)
if err != nil {
return err
}
sig.value = s1.value
return nil
}
// Represents accumulated multiple Public Keys in G2 for verifying a multisignature
type MultiPublicKeyVt struct {
value bls12381.PointG2
}
// Serialize a public key to a byte array in compressed form.
// See
// https://github.com/zcash/librustzcash/blob/master/pairing/src/bls12_381/README.md#serialization
// https://docs.rs/bls12_381/0.1.1/bls12_381/notes/serialization/index.html
func (pk *MultiPublicKeyVt) MarshalBinary() ([]byte, error) {
return blsEngine.G2.ToCompressed(&pk.value), nil
}
// Deserialize a public key from a byte array in compressed form.
// See
// https://github.com/zcash/librustzcash/blob/master/pairing/src/bls12_381/README.md#serialization
// https://docs.rs/bls12_381/0.1.1/bls12_381/notes/serialization/index.html
// If successful, it will assign the public key
// otherwise it will return an error
func (pk *MultiPublicKeyVt) UnmarshalBinary(data []byte) error {
if len(data) != PublicKeyVtSize {
return fmt.Errorf("multi public key must be %v bytes", PublicKeySize)
}
p2 := new(PublicKeyVt)
err := p2.UnmarshalBinary(data)
if err != nil {
return err
}
pk.value = p2.value
return nil
}
// Check a multisignature is valid for a multipublickey and a message
func (pk *MultiPublicKeyVt) verify(message []byte, sig *MultiSignatureVt, signDstVt string) (bool, error) {
return sig.verify(pk, message, signDstVt)
}
// PartialSignatureVt represents threshold Gap Diffie-Hellman BLS signature
// that can be combined with other partials to yield a completed BLS signature
// See section 3.2 in <https://www.cc.gatech.edu/~aboldyre/papers/bold.pdf>
type PartialSignatureVt struct {
identifier byte
signature bls12381.PointG1
}
// partialSignVt creates a partial signature that can be combined with other partial signatures
// to yield a complete signature
func (sks *SecretKeyShare) partialSignVt(message []byte, signDst string) (*PartialSignatureVt, error) {
if len(message) == 0 {
return nil, fmt.Errorf("message cannot be empty or nil")
}
p1, err := blsEngine.G1.HashToCurve(sha256.New, message, []byte(signDst))
if err != nil {
return nil, err
}
result := blsEngine.G1.New()
blsEngine.G1.MulScalar(result, p1, sks.value.Secret.BigInt())
if !blsEngine.G1.InCorrectSubgroup(result) {
return nil, fmt.Errorf("point is not on correct subgroup")
}
return &PartialSignatureVt{identifier: sks.value.Identifier, signature: *result}, nil
}
// combineSigsVt gathers partial signatures and yields a complete signature
func combineSigsVt(partials []*PartialSignatureVt) (*SignatureVt, error) {
if len(partials) < 2 {
return nil, fmt.Errorf("must have at least 2 partial signatures")
}
if len(partials) > 255 {
return nil, fmt.Errorf("unsupported to combine more than 255 signatures")
}
field := finitefield.New(blsEngine.G1.Q())
xVars, yVars, err := splitXYVt(field, partials)
if err != nil {
return nil, err
}
sTmp := blsEngine.G1.New()
sig := blsEngine.G1.New()
// Lagrange interpolation
x := field.Zero()
for i, xi := range xVars {
basis := field.One()
for j, xj := range xVars {
if i == j {
continue
}
num := x.Sub(xj) // x - x_m
den := xi.Sub(xj) // x_j - x_m
if den.IsEqual(field.Zero()) {
return nil, fmt.Errorf("signatures cannot be recombined")
}
basis = basis.Mul(num.Div(den))
}
blsEngine.G1.MulScalar(sTmp, yVars[i], basis.BigInt())
blsEngine.G1.Add(sig, sig, sTmp)
}
if !blsEngine.G1.InCorrectSubgroup(sig) {
return nil, fmt.Errorf("signature is not in the correct subgroup")
}
return &SignatureVt{value: *sig}, nil
}
// Ensure no duplicates x values and convert x values to field elements
func splitXYVt(field *finitefield.Field, partials []*PartialSignatureVt) ([]*finitefield.Element, []*bls12381.PointG1, error) {
x := make([]*finitefield.Element, len(partials))
y := make([]*bls12381.PointG1, len(partials))
dup := make(map[byte]bool)
for i, sp := range partials {
if sp == nil {
return nil, nil, fmt.Errorf("partial signature cannot be nil")
}
if _, exists := dup[sp.identifier]; exists {
return nil, nil, fmt.Errorf("duplicate signature included")
}
if !blsEngine.G1.InCorrectSubgroup(&sp.signature) {
return nil, nil, fmt.Errorf("signature is not in the correct subgroup")
}
dup[sp.identifier] = true
x[i] = field.ElementFromBytes([]byte{sp.identifier})
y[i] = &sp.signature
}
return x, y, nil
}
| |
screenshot.go
|
package utils
import (
"github.com/go-gl/gl/v3.3-core/gl"
"github.com/wieku/danser-go/framework/graphics/texture"
"log"
"os"
"time"
)
func
|
(w, h int, name string, async bool) {
pixmap := texture.NewPixMapC(w, h, 3)
gl.PixelStorei(gl.PACK_ALIGNMENT, int32(1))
gl.ReadPixels(0, 0, int32(w), int32(h), gl.RGB, gl.UNSIGNED_BYTE, pixmap.RawPointer)
save := func() {
defer pixmap.Dispose()
err := os.Mkdir("screenshots", 0755)
if err != nil && !os.IsExist(err) {
log.Println("Failed to save the screenshot! Error:", err)
return
}
fileName := name
if fileName == "" {
fileName = "danser_" + time.Now().Format("2006-01-02_15-04-05")
}
fileName += ".png"
err = pixmap.WritePng("screenshots/"+fileName, true)
if err != nil {
log.Println("Failed to save the screenshot! Error:", err)
return
}
log.Println("Screenshot", fileName, "saved!")
}
if async {
go save()
} else {
save()
}
}
|
MakeScreenshot
|
appointment.service.ts
|
import { Injectable } from '@angular/core';
import { Observable } from 'rxjs/Observable';
import { AppointmentService } from '../api/api/appointment.service';
import { ViewAppointment } from './appointment.viewmodel';
@Injectable()
export class ViewAppointmentService {
constructor(private appointmentService: AppointmentService) {}
public appointmentFind(
filter?: string,
extraHttpRequestParams?: any): Observable<ViewAppointment[]> {
return this.appointmentService.appointmentFindDeep(filter, extraHttpRequestParams)
.map((x, idx) => {
// Event colors
for (let i = x.length - 1; i >= 0; i--) {
if (x[i].examinations && x[i].examinations.length > 0) {
x[i].color = x[i].examinations[0].color;
x[i].backgroundColor = x[i].examinations[0].backgroundColor;
x[i].borderColor = x[i].backgroundColor;
}
// Event title display
if (!x[i].title) {
if (x[i].patient) {
x[i].title =
`${x[i].patient.givenName} ${x[i].patient.surname}`;
}
}
// Render blocked appointments differently
if (x[i].autoAppointmentBlockedSecret) {
x[i].title = 'Auto-Offered';
x[i].color = '#000000';
x[i].backgroundColor = '#ffffff';
x[i].borderColor = '#000000';
x[i].className = 'auto-appointment-blocked';
}
}
return x;
});
}
public appointmentFindAnonymous(
filter?: string,
extraHttpRequestParams?: any): Observable<ViewAppointment[]> {
return this.appointmentService.appointmentFindDeep(filter, extraHttpRequestParams)
.map((x, idx) => {
// Event colors
for (let i = x.length - 1; i >= 0; i--) {
x.id = undefined;
if (x[i].examinations && x[i].examinations.length > 0) {
x[i].color = x[i].examinations[0].color;
x[i].backgroundColor = x[i].examinations[0].backgroundColor;
x[i].borderColor = x[i].backgroundColor;
}
x[i].examinations = undefined;
x[i].title = undefined;
x[i].patient = undefined;
x[i].description = undefined;
}
return x;
});
}
|
}
|
|
emulator.ts
|
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license.
//
// Microsoft Bot Framework: http://botframework.com
//
// Bot Framework Emulator Github:
// https://github.com/Microsoft/BotFramwork-Emulator
//
// Copyright (c) Microsoft Corporation
// All rights reserved.
//
// MIT License:
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED ""AS IS"", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
var http = require('http');
var https = require('https');
var ElectronProxyAgent = require('electron-proxy-agent');
import { BotFrameworkService } from './botFrameworkService';
import { ConversationManager } from './conversationManager';
import * as Settings from './settings';
import * as Electron from 'electron';
import { windowManager, mainWindow } from './main';
interface IQueuedMessage {
channel: any,
args: any[]
}
/**
* Top-level state container for the Node process.
*/
export class
|
{
framework = new BotFrameworkService();
conversations = new ConversationManager();
proxyAgent: any;
static queuedMessages: IQueuedMessage[] = [];
constructor() {
// When the client notifies us it has started up, send it the configuration.
// Note: We're intentionally sending and ISettings here, not a Settings. This
// is why we're getting the value from getStore().getState().
Electron.ipcMain.on('clientStarted', () => {
// Use system proxy settings for outgoing requests
const session = Electron.session.defaultSession;
this.proxyAgent = new ElectronProxyAgent(session);
http.globalAgent = this.proxyAgent;
https.globalAgent = this.proxyAgent;
windowManager.addMainWindow(mainWindow);
Emulator.queuedMessages.forEach((msg) => {
Emulator.send(msg.channel, ...msg.args);
});
Emulator.queuedMessages = [];
Emulator.send('serverSettings', Settings.getStore().getState());
});
Settings.addSettingsListener(() => {
Emulator.send('serverSettings', Settings.getStore().getState());
});
Electron.ipcMain.on('getSpeechToken', (event, args: string) => {
// args is the conversation id
this.getSpeechToken(event, args);
});
Electron.ipcMain.on('refreshSpeechToken', (event, args: string) => {
// args is the conversation id
this.getSpeechToken(event, args, true);
});
}
private getSpeechToken(event: Electron.Event, conversationId: string, refresh: boolean = false) {
const settings = Settings.getSettings();
const activeBot = settings.getActiveBot();
if (activeBot && activeBot.botId && conversationId) {
let conversation = this.conversations.conversationById(activeBot.botId, conversationId);
conversation.getSpeechToken(10, (tokenInfo) => {
event.returnValue = tokenInfo;
}, refresh);
} else {
event.returnValue = { error: 'No bot', error_Description: 'To use speech, you must connect to a bot and have an active conversation.'};
}
}
/**
* Loads settings from disk and then creates the emulator.
*/
static startup() {
Settings.startup();
emulator = new Emulator();
emulator.framework.startup();
}
/**
* Sends a command to the client.
*/
static send(channel: string, ...args: any[]) {
if (windowManager && windowManager.hasMainWindow()) {
windowManager.getMainWindow().webContents.send(channel, ...args);
} else {
Emulator.queuedMessages.push({ channel, args})
}
}
}
export let emulator: Emulator;
|
Emulator
|
index.spec.js
|
'use strict'
/* global describe, it, beforeEach */
const expect = require('chai').expect
const dustcover = require('../')
const Bookshelf = require('./helpers/bookshelf')
const models = require('./helpers/models')
const migrations = require('./helpers/migrations')
const seeds = require('./helpers/seeds')
let bookshelf
let Models
describe('dustcover', function () {
describe('optIn option', function () {
beforeEach(function () {
bookshelf = Bookshelf()
})
describe('set to false', function () {
beforeEach(function () {
bookshelf.plugin(dustcover, {
host: 'http://localhost:3000',
optIn: false
})
Models = models(bookshelf)
return migrations(bookshelf).then(() => seeds(bookshelf))
})
describe('single model serialization', function () {
describe('model jsonapi property not set', function () {
it('should jsonapi serialize model', function () {
return new Models.Cat({id: 1}).fetch().then((cat) => {
const serialized = cat.toJSON()
expect(serialized).to.include.key('data')
expect(serialized.data.attributes).to.include.keys(['name', 'description'])
})
})
})
describe('model jsonapi property set to true', function () {
it('should jsonapi serialize model', function () {
return new Models.Hat({id: 1}).fetch().then((hat) => {
const serialized = hat.toJSON()
expect(serialized).to.include.key('data')
expect(serialized.data.attributes).to.include.keys(['name', 'description'])
})
})
})
describe('model jsonapi property set to false', function () {
it('should not jsonapi serialize model', function () {
return new Models.Dress({id: 1}).fetch().then((dress) => {
const serialized = dress.toJSON()
expect(serialized).to.not.include.key('data')
expect(serialized).to.include.keys(['name', 'description'])
})
})
})
})
describe('collection serialization', function () {
describe('model jsonapi property not set', function () {
it('should jsonapi serialize model', function () {
return Models.Cat.fetchAll().then((cats) => {
const serialized = cats.toJSON()
expect(serialized).to.include.key('data')
expect(serialized.data[0].attributes).to.include.keys(['name', 'description'])
})
})
})
describe('model jsonapi property set to true', function () {
it('should jsonapi serialize model', function () {
return Models.Hat.fetchAll().then((hats) => {
const serialized = hats.toJSON()
expect(serialized).to.include.key('data')
expect(serialized.data[0]).to.include.keys(['attributes'])
})
})
})
describe('model jsonapi property set to false', function () {
it('should not jsonapi serialize model', function () {
return Models.Dress.fetchAll().then((dresses) => {
const serialized = dresses.toJSON()
expect(serialized).to.not.include.key('data')
expect(serialized).to.be.an('array')
})
})
})
})
})
describe('set to true', function () {
beforeEach(function () {
bookshelf.plugin(dustcover, {
host: 'http://localhost:3000',
optIn: true
})
Models = models(bookshelf)
return migrations(bookshelf).then(() => seeds(bookshelf))
})
describe('single model serialization', function () {
describe('model jsonapi property not set', function () {
it('should not jsonapi serialize model', function () {
return new Models.Cat({id: 1}).fetch().then((cat) => {
const serialized = cat.toJSON()
expect(serialized).to.not.include.key('data')
expect(serialized).to.include.keys(['name', 'description'])
})
})
})
describe('model jsonapi property set to true', function () {
it('should jsonapi serialize model', function () {
return new Models.Hat({id: 1}).fetch().then((hat) => {
const serialized = hat.toJSON()
expect(serialized).to.include.key('data')
expect(serialized.data.attributes).to.include.keys(['name', 'description'])
})
})
})
describe('model jsonapi property set to false', function () {
it('should not jsonapi serialize model', function () {
return new Models.Dress({id: 1}).fetch().then((dress) => {
const serialized = dress.toJSON()
expect(serialized).to.not.include.key('data')
expect(serialized).to.include.keys(['name', 'description'])
})
})
})
})
describe('collection serialization', function () {
describe('model jsonapi property not set', function () {
it('should not jsonapi serialize model', function () {
return Models.Cat.fetchAll().then((cats) => {
const serialized = cats.toJSON()
expect(serialized).to.not.include.key('data')
expect(serialized).to.be.an('array')
})
})
})
describe('model jsonapi property set to true', function () {
it('should jsonapi serialize model', function () {
return Models.Hat.fetchAll().then((hats) => {
const serialized = hats.toJSON()
expect(serialized).to.include.key('data')
expect(serialized.data[0]).to.include.keys(['attributes'])
})
})
})
describe('model jsonapi property set to false', function () {
it('should not jsonapi serialize model', function () {
return Models.Dress.fetchAll().then((dresses) => {
const serialized = dresses.toJSON()
expect(serialized).to.not.include.key('data')
expect(serialized).to.be.an('array')
})
})
})
})
})
})
describe('serialization', function () {
beforeEach(function () {
bookshelf = Bookshelf()
bookshelf.plugin(dustcover, {host: 'http://localhost:3000'})
Models = models(bookshelf)
return migrations(bookshelf).then(() => seeds(bookshelf))
})
describe('fetching all records', function () {
it('toJSON should serialize an array of models in JSON API format', function () {
return Models.Book.fetchAll().then((books) => {
const serialized = books.toJSON({type: 'books'})
expect(serialized).to.include.key('data')
expect(serialized.data).to.be.an('array')
expect(serialized.data[0]).to.include.keys('id', 'type', 'attributes')
expect(serialized.data[0].type).to.equal('books')
})
})
it('type should be determined automatically if available', function () {
return Models.Cat.fetchAll().then((cats) => {
const serialized = cats.toJSON()
expect(serialized.data[0].type).to.equal('cats')
expect(serialized.data[0].attributes).to.not.have.key('type')
})
})
it('empty result array should still be keyed by data', function () {
return Models.House.fetchAll().then((houses) => {
const serialized = houses.toJSON()
expect(serialized.data).to.be.an('array')
expect(serialized.data.length).to.equal(0)
})
})
it('disable jsonapi serialization for a collection when calling toJSON', function () {
return new Models.Cat().fetchAll().then((cats) => {
const serialized = cats.toJSON({jsonapi: false})
expect(serialized).to.be.an('array')
expect(serialized[0]).to.include.keys(['name', 'description'])
})
})
})
describe('fetching a single record', function () {
it('toJSON should serialize a model in JSON API format', function () {
return new Models.Cat({id: 1}).fetch().then((cat) => {
const serialized = cat.toJSON()
expect(serialized).to.include.key('data')
expect(serialized.data).to.be.an('object')
expect(serialized.data).to.include.keys('id', 'type', 'attributes', 'links')
expect(serialized.data.type).to.equal('cats')
expect(serialized.data.links.self).to.equal('http://localhost:3000/cats/1')
})
})
it('relationships key should absent if none exist', function () {
return new Models.Book({id: 1}).fetch().then((book) => {
expect(book.toJSON().data).to.not.include.key('relationships')
})
})
it('relationships key should be populated if any exist', function () {
return new Models.Cat({id: 1}).fetch().then((cat) => {
const serialized = cat.toJSON()
expect(serialized.data).to.include.key('relationships')
expect(serialized.data.relationships).to.be.an('object')
expect(serialized.data.relationships).to.include.key('owner')
expect(serialized.data.relationships.owner).to.include.key('links')
expect(serialized.data.relationships.owner.links).to.include.key('related')
expect(serialized.data.relationships.owner.links.related).to.equal('http://localhost:3000/cats/1/owner')
expect(serialized.data.relationships.owner).to.not.include.key('data')
})
})
it('disable jsonapi serialization for a single toJSON call', function () {
return new Models.Cat({id: 1}).fetch().then((cat) => {
const serialized = cat.toJSON({jsonapi: false})
expect(serialized).to.not.include.key('data')
|
})
})
})
|
expect(serialized).to.include.keys(['name', 'description'])
})
})
|
resources_js_vue_views_Articles_vue.js
|
"use strict";
(self["webpackChunk"] = self["webpackChunk"] || []).push([["resources_js_vue_views_Articles_vue"],{
/***/ "./node_modules/babel-loader/lib/index.js??clonedRuleSet-5.use[0]!./node_modules/vue-loader/dist/index.js??ruleSet[0].use[0]!./resources/js/vue/components/Articles/Articles.vue?vue&type=script&lang=js":
/*!***************************************************************************************************************************************************************************************************************!*\
!*** ./node_modules/babel-loader/lib/index.js??clonedRuleSet-5.use[0]!./node_modules/vue-loader/dist/index.js??ruleSet[0].use[0]!./resources/js/vue/components/Articles/Articles.vue?vue&type=script&lang=js ***!
\***************************************************************************************************************************************************************************************************************/
/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "default": () => (__WEBPACK_DEFAULT_EXPORT__)
/* harmony export */ });
/* harmony import */ var _Card_Card_Article_vue__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../Card/Card.Article.vue */ "./resources/js/vue/components/Card/Card.Article.vue");
/* harmony default export */ const __WEBPACK_DEFAULT_EXPORT__ = ({
props: {
articles: {
type: Array,
"default": []
}
},
components: {
CardArticle: _Card_Card_Article_vue__WEBPACK_IMPORTED_MODULE_0__["default"]
},
setup: function setup(props) {
var articles = props.articles;
return {
articles: articles
};
}
});
/***/ }),
/***/ "./node_modules/babel-loader/lib/index.js??clonedRuleSet-5.use[0]!./node_modules/vue-loader/dist/index.js??ruleSet[0].use[0]!./resources/js/vue/components/Card/Card.Article.vue?vue&type=script&lang=js":
/*!***************************************************************************************************************************************************************************************************************!*\
!*** ./node_modules/babel-loader/lib/index.js??clonedRuleSet-5.use[0]!./node_modules/vue-loader/dist/index.js??ruleSet[0].use[0]!./resources/js/vue/components/Card/Card.Article.vue?vue&type=script&lang=js ***!
\***************************************************************************************************************************************************************************************************************/
/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "default": () => (__WEBPACK_DEFAULT_EXPORT__)
/* harmony export */ });
/* harmony import */ var _vue_reactivity__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! @vue/reactivity */ "./node_modules/@vue/reactivity/dist/reactivity.esm-bundler.js");
/* harmony default export */ const __WEBPACK_DEFAULT_EXPORT__ = ({
props: {
article: {
type: Object,
"default": {}
}
},
setup: function setup(props) {
var title = (0,_vue_reactivity__WEBPACK_IMPORTED_MODULE_0__.computed)(function () {
return "".concat(props.article.title.substring(0, 50), "...");
});
var content = (0,_vue_reactivity__WEBPACK_IMPORTED_MODULE_0__.computed)(function () {
return "".concat(props.article.content.substring(0, 100), "...");
});
var image =
/*props.article.image_url ??*/
'https://images.unsplash.com/photo-1555066931-4365d14bab8c?crop=entropy&cs=tinysrgb&fit=crop&fm=jpg&h=600&ixid=MnwxfDB8MXxyYW5kb218MHx8Y29kZXx8fHx8fDE2MzE1ODM3Njk&ixlib=rb-1.2.1&q=80&utm_campaign=api-credit&utm_medium=referral&utm_source=unsplash_source&w=600';
var slug = props.article.slug;
return {
title: title,
content: content,
image: image,
slug: slug
};
}
});
/***/ }),
/***/ "./node_modules/babel-loader/lib/index.js??clonedRuleSet-5.use[0]!./node_modules/vue-loader/dist/index.js??ruleSet[0].use[0]!./resources/js/vue/components/Loader/FlexCircle.Loader.vue?vue&type=script&lang=js":
/*!**********************************************************************************************************************************************************************************************************************!*\
!*** ./node_modules/babel-loader/lib/index.js??clonedRuleSet-5.use[0]!./node_modules/vue-loader/dist/index.js??ruleSet[0].use[0]!./resources/js/vue/components/Loader/FlexCircle.Loader.vue?vue&type=script&lang=js ***!
\**********************************************************************************************************************************************************************************************************************/
/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "default": () => (__WEBPACK_DEFAULT_EXPORT__)
/* harmony export */ });
/* harmony default export */ const __WEBPACK_DEFAULT_EXPORT__ = ({});
/***/ }),
/***/ "./node_modules/babel-loader/lib/index.js??clonedRuleSet-5.use[0]!./node_modules/vue-loader/dist/index.js??ruleSet[0].use[0]!./resources/js/vue/views/Articles.vue?vue&type=script&lang=js":
/*!*************************************************************************************************************************************************************************************************!*\
!*** ./node_modules/babel-loader/lib/index.js??clonedRuleSet-5.use[0]!./node_modules/vue-loader/dist/index.js??ruleSet[0].use[0]!./resources/js/vue/views/Articles.vue?vue&type=script&lang=js ***!
\*************************************************************************************************************************************************************************************************/
/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "default": () => (__WEBPACK_DEFAULT_EXPORT__)
/* harmony export */ });
/* harmony import */ var _vue_reactivity__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! @vue/reactivity */ "./node_modules/@vue/reactivity/dist/reactivity.esm-bundler.js");
/* harmony import */ var _components_Articles_Articles_vue__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../components/Articles/Articles.vue */ "./resources/js/vue/components/Articles/Articles.vue");
/* harmony import */ var _components_Loader_FlexCircle_Loader_vue__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../components/Loader/FlexCircle.Loader.vue */ "./resources/js/vue/components/Loader/FlexCircle.Loader.vue");
/* harmony import */ var _fetch_articles_all__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../fetch/articles.all */ "./resources/js/vue/fetch/articles.all.js");
/* harmony import */ var _vue_runtime_core__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! @vue/runtime-core */ "./node_modules/@vue/runtime-core/dist/runtime-core.esm-bundler.js");
/* harmony default export */ const __WEBPACK_DEFAULT_EXPORT__ = ({
components: {
Articles: _components_Articles_Articles_vue__WEBPACK_IMPORTED_MODULE_0__["default"],
FlexCircleLoader: _components_Loader_FlexCircle_Loader_vue__WEBPACK_IMPORTED_MODULE_1__["default"]
},
setup: function setup() {
var articles = (0,_vue_reactivity__WEBPACK_IMPORTED_MODULE_3__.ref)([]);
var isEmpty = (0,_vue_reactivity__WEBPACK_IMPORTED_MODULE_3__.computed)(function () {
return articles.value.length <= 0;
});
var loading = (0,_vue_reactivity__WEBPACK_IMPORTED_MODULE_3__.ref)(false);
(0,_vue_runtime_core__WEBPACK_IMPORTED_MODULE_4__.onMounted)(function () {
loading.value = true;
(0,_fetch_articles_all__WEBPACK_IMPORTED_MODULE_2__.articlesAll)().then(function (res) {
return articles.value = res;
})["finally"](function () {
return loading.value = false;
});
});
return {
articles: articles,
isEmpty: isEmpty,
loading: loading
};
}
});
/***/ }),
/***/ "./node_modules/babel-loader/lib/index.js??clonedRuleSet-5.use[0]!./node_modules/vue-loader/dist/templateLoader.js??ruleSet[1].rules[2]!./node_modules/vue-loader/dist/index.js??ruleSet[0].use[0]!./resources/js/vue/components/Articles/Articles.vue?vue&type=template&id=1e9bd7fd":
/*!*******************************************************************************************************************************************************************************************************************************************************************************************!*\
!*** ./node_modules/babel-loader/lib/index.js??clonedRuleSet-5.use[0]!./node_modules/vue-loader/dist/templateLoader.js??ruleSet[1].rules[2]!./node_modules/vue-loader/dist/index.js??ruleSet[0].use[0]!./resources/js/vue/components/Articles/Articles.vue?vue&type=template&id=1e9bd7fd ***!
\*******************************************************************************************************************************************************************************************************************************************************************************************/
/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "render": () => (/* binding */ render)
/* harmony export */ });
/* harmony import */ var vue__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! vue */ "./node_modules/vue/dist/vue.esm-bundler.js");
var _hoisted_1 = {
"class": "row row-cols-1 row-cols-md-2 row-cols-lg-3 row-cols-xxl-4"
};
function render(_ctx, _cache, $props, $setup, $data, $options) {
var _component_CardArticle = (0,vue__WEBPACK_IMPORTED_MODULE_0__.resolveComponent)("CardArticle");
return (0,vue__WEBPACK_IMPORTED_MODULE_0__.openBlock)(), (0,vue__WEBPACK_IMPORTED_MODULE_0__.createElementBlock)("div", _hoisted_1, [((0,vue__WEBPACK_IMPORTED_MODULE_0__.openBlock)(true), (0,vue__WEBPACK_IMPORTED_MODULE_0__.createElementBlock)(vue__WEBPACK_IMPORTED_MODULE_0__.Fragment, null, (0,vue__WEBPACK_IMPORTED_MODULE_0__.renderList)($setup.articles, function (article) {
return (0,vue__WEBPACK_IMPORTED_MODULE_0__.openBlock)(), (0,vue__WEBPACK_IMPORTED_MODULE_0__.createElementBlock)("div", {
"class": "col p-3",
key: article.id
}, [(0,vue__WEBPACK_IMPORTED_MODULE_0__.createVNode)(_component_CardArticle, {
article: article
}, null, 8
/* PROPS */
, ["article"])]);
}), 128
/* KEYED_FRAGMENT */
))]);
}
/***/ }),
/***/ "./node_modules/babel-loader/lib/index.js??clonedRuleSet-5.use[0]!./node_modules/vue-loader/dist/templateLoader.js??ruleSet[1].rules[2]!./node_modules/vue-loader/dist/index.js??ruleSet[0].use[0]!./resources/js/vue/components/Card/Card.Article.vue?vue&type=template&id=08c8c276":
/*!*******************************************************************************************************************************************************************************************************************************************************************************************!*\
!*** ./node_modules/babel-loader/lib/index.js??clonedRuleSet-5.use[0]!./node_modules/vue-loader/dist/templateLoader.js??ruleSet[1].rules[2]!./node_modules/vue-loader/dist/index.js??ruleSet[0].use[0]!./resources/js/vue/components/Card/Card.Article.vue?vue&type=template&id=08c8c276 ***!
\*******************************************************************************************************************************************************************************************************************************************************************************************/
/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "render": () => (/* binding */ render)
/* harmony export */ });
/* harmony import */ var vue__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! vue */ "./node_modules/vue/dist/vue.esm-bundler.js");
var _hoisted_1 = {
"class": "card p-0 border-0 shadow rounded-3"
};
var _hoisted_2 = ["src"];
var _hoisted_3 = {
"class": "card-body"
};
var _hoisted_4 = {
"class": "card-title"
};
var _hoisted_5 = {
"class": "card-text"
};
var _hoisted_6 = /*#__PURE__*/(0,vue__WEBPACK_IMPORTED_MODULE_0__.createTextVNode)("Read more");
function render(_ctx, _cache, $props, $setup, $data, $options) {
var _component_router_link = (0,vue__WEBPACK_IMPORTED_MODULE_0__.resolveComponent)("router-link");
return (0,vue__WEBPACK_IMPORTED_MODULE_0__.openBlock)(), (0,vue__WEBPACK_IMPORTED_MODULE_0__.createElementBlock)("div", _hoisted_1, [(0,vue__WEBPACK_IMPORTED_MODULE_0__.createElementVNode)("img", {
src: $setup.image,
"class": "card-img-top h-250 object-cover",
alt: "code"
}, null, 8
/* PROPS */
, _hoisted_2), (0,vue__WEBPACK_IMPORTED_MODULE_0__.createElementVNode)("div", _hoisted_3, [(0,vue__WEBPACK_IMPORTED_MODULE_0__.createElementVNode)("h5", _hoisted_4, (0,vue__WEBPACK_IMPORTED_MODULE_0__.toDisplayString)($setup.title), 1
/* TEXT */
), (0,vue__WEBPACK_IMPORTED_MODULE_0__.createElementVNode)("p", _hoisted_5, (0,vue__WEBPACK_IMPORTED_MODULE_0__.toDisplayString)($setup.content), 1
/* TEXT */
), (0,vue__WEBPACK_IMPORTED_MODULE_0__.createVNode)(_component_router_link, {
to: {
name: 'Home'
},
"class": "btn btn-primary text-decoration-none ms-auto w-fit-content d-block"
}, {
"default": (0,vue__WEBPACK_IMPORTED_MODULE_0__.withCtx)(function () {
return [_hoisted_6];
}),
_: 1
/* STABLE */
})])]);
}
/***/ }),
/***/ "./node_modules/babel-loader/lib/index.js??clonedRuleSet-5.use[0]!./node_modules/vue-loader/dist/templateLoader.js??ruleSet[1].rules[2]!./node_modules/vue-loader/dist/index.js??ruleSet[0].use[0]!./resources/js/vue/components/Loader/FlexCircle.Loader.vue?vue&type=template&id=0b1c155e":
/*!**************************************************************************************************************************************************************************************************************************************************************************************************!*\
!*** ./node_modules/babel-loader/lib/index.js??clonedRuleSet-5.use[0]!./node_modules/vue-loader/dist/templateLoader.js??ruleSet[1].rules[2]!./node_modules/vue-loader/dist/index.js??ruleSet[0].use[0]!./resources/js/vue/components/Loader/FlexCircle.Loader.vue?vue&type=template&id=0b1c155e ***!
\**************************************************************************************************************************************************************************************************************************************************************************************************/
/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "render": () => (/* binding */ render)
/* harmony export */ });
/* harmony import */ var vue__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! vue */ "./node_modules/vue/dist/vue.esm-bundler.js");
var _hoisted_1 = {
"class": "d-flex justify-content-center mt-5"
};
var _hoisted_2 = /*#__PURE__*/(0,vue__WEBPACK_IMPORTED_MODULE_0__.createElementVNode)("div", {
"class": "spinner-grow spinner-grow-sm m-2",
role: "status"
}, [/*#__PURE__*/(0,vue__WEBPACK_IMPORTED_MODULE_0__.createElementVNode)("span", {
"class": "visually-hidden"
}, "Loading...")], -1
/* HOISTED */
);
var _hoisted_3 = /*#__PURE__*/(0,vue__WEBPACK_IMPORTED_MODULE_0__.createElementVNode)("div", {
"class": "spinner-grow spinner-grow-sm m-2",
role: "status"
}, [/*#__PURE__*/(0,vue__WEBPACK_IMPORTED_MODULE_0__.createElementVNode)("span", {
"class": "visually-hidden"
}, "Loading...")], -1
/* HOISTED */
);
var _hoisted_4 = /*#__PURE__*/(0,vue__WEBPACK_IMPORTED_MODULE_0__.createElementVNode)("div", {
"class": "spinner-grow spinner-grow-sm m-2",
role: "status"
}, [/*#__PURE__*/(0,vue__WEBPACK_IMPORTED_MODULE_0__.createElementVNode)("span", {
"class": "visually-hidden"
}, "Loading...")], -1
/* HOISTED */
);
var _hoisted_5 = [_hoisted_2, _hoisted_3, _hoisted_4];
function render(_ctx, _cache, $props, $setup, $data, $options) {
return (0,vue__WEBPACK_IMPORTED_MODULE_0__.openBlock)(), (0,vue__WEBPACK_IMPORTED_MODULE_0__.createElementBlock)("div", _hoisted_1, _hoisted_5);
}
/***/ }),
/***/ "./node_modules/babel-loader/lib/index.js??clonedRuleSet-5.use[0]!./node_modules/vue-loader/dist/templateLoader.js??ruleSet[1].rules[2]!./node_modules/vue-loader/dist/index.js??ruleSet[0].use[0]!./resources/js/vue/views/Articles.vue?vue&type=template&id=ee95e7f6":
/*!*****************************************************************************************************************************************************************************************************************************************************************************!*\
!*** ./node_modules/babel-loader/lib/index.js??clonedRuleSet-5.use[0]!./node_modules/vue-loader/dist/templateLoader.js??ruleSet[1].rules[2]!./node_modules/vue-loader/dist/index.js??ruleSet[0].use[0]!./resources/js/vue/views/Articles.vue?vue&type=template&id=ee95e7f6 ***!
\*****************************************************************************************************************************************************************************************************************************************************************************/
/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "render": () => (/* binding */ render)
/* harmony export */ });
/* harmony import */ var vue__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! vue */ "./node_modules/vue/dist/vue.esm-bundler.js");
var _hoisted_1 = {
"class": "container mx-auto mt-5"
};
var _hoisted_2 = /*#__PURE__*/(0,vue__WEBPACK_IMPORTED_MODULE_0__.createElementVNode)("h1", {
"class": "text-center mb-3"
}, "Articles", -1
/* HOISTED */
);
var _hoisted_3 = {
key: 0
};
var _hoisted_4 = {
key: 1
};
var _hoisted_5 = /*#__PURE__*/(0,vue__WEBPACK_IMPORTED_MODULE_0__.createElementVNode)("h1", {
"class": "text-center mt-5 lead"
}, "Articles is empty.", -1
/* HOISTED */
);
var _hoisted_6 = [_hoisted_5];
var _hoisted_7 = {
key: 2
};
function render(_ctx, _cache, $props, $setup, $data, $options) {
var _component_flex_circle_loader = (0,vue__WEBPACK_IMPORTED_MODULE_0__.resolveComponent)("flex-circle-loader");
var _component_articles = (0,vue__WEBPACK_IMPORTED_MODULE_0__.resolveComponent)("articles", true);
return (0,vue__WEBPACK_IMPORTED_MODULE_0__.openBlock)(), (0,vue__WEBPACK_IMPORTED_MODULE_0__.createElementBlock)("div", _hoisted_1, [_hoisted_2, $setup.loading ? ((0,vue__WEBPACK_IMPORTED_MODULE_0__.openBlock)(), (0,vue__WEBPACK_IMPORTED_MODULE_0__.createElementBlock)("div", _hoisted_3, [(0,vue__WEBPACK_IMPORTED_MODULE_0__.createVNode)(_component_flex_circle_loader)])) : $setup.isEmpty ? ((0,vue__WEBPACK_IMPORTED_MODULE_0__.openBlock)(), (0,vue__WEBPACK_IMPORTED_MODULE_0__.createElementBlock)("div", _hoisted_4, _hoisted_6)) : ((0,vue__WEBPACK_IMPORTED_MODULE_0__.openBlock)(), (0,vue__WEBPACK_IMPORTED_MODULE_0__.createElementBlock)("div", _hoisted_7, [(0,vue__WEBPACK_IMPORTED_MODULE_0__.createVNode)(_component_articles, {
articles: $setup.articles
}, null, 8
/* PROPS */
, ["articles"])]))]);
}
/***/ }),
/***/ "./resources/js/vue/fetch/articles.all.js":
/*!************************************************!*\
!*** ./resources/js/vue/fetch/articles.all.js ***!
\************************************************/
/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "articlesAll": () => (/* binding */ articlesAll)
/* harmony export */ });
/* harmony import */ var _babel_runtime_regenerator__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! @babel/runtime/regenerator */ "./node_modules/@babel/runtime/regenerator/index.js");
/* harmony import */ var _babel_runtime_regenerator__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(_babel_runtime_regenerator__WEBPACK_IMPORTED_MODULE_0__);
function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } }
function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function
|
(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; }
var articlesAll = /*#__PURE__*/function () {
var _ref = _asyncToGenerator( /*#__PURE__*/_babel_runtime_regenerator__WEBPACK_IMPORTED_MODULE_0___default().mark(function _callee() {
var _res$data, rawRes, res;
return _babel_runtime_regenerator__WEBPACK_IMPORTED_MODULE_0___default().wrap(function _callee$(_context) {
while (1) {
switch (_context.prev = _context.next) {
case 0:
_context.prev = 0;
_context.next = 3;
return fetch('/api/articles', {
method: 'GET',
headers: {
'Content-Type': 'application/json',
'Accept': 'application/json'
}
});
case 3:
rawRes = _context.sent;
_context.next = 6;
return rawRes.json();
case 6:
res = _context.sent;
if (!(rawRes.status === 404)) {
_context.next = 9;
break;
}
return _context.abrupt("return", Promise.resolve([]));
case 9:
if (!res.error) {
_context.next = 11;
break;
}
return _context.abrupt("return", Promise.reject(res.error));
case 11:
return _context.abrupt("return", Promise.resolve((_res$data = res === null || res === void 0 ? void 0 : res.data) !== null && _res$data !== void 0 ? _res$data : []));
case 14:
_context.prev = 14;
_context.t0 = _context["catch"](0);
console.error(_context.t0.message);
return _context.abrupt("return", Promise.reject(_context.t0.message));
case 18:
case "end":
return _context.stop();
}
}
}, _callee, null, [[0, 14]]);
}));
return function articlesAll() {
return _ref.apply(this, arguments);
};
}();
/***/ }),
/***/ "./resources/js/vue/components/Articles/Articles.vue":
/*!***********************************************************!*\
!*** ./resources/js/vue/components/Articles/Articles.vue ***!
\***********************************************************/
/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "default": () => (__WEBPACK_DEFAULT_EXPORT__)
/* harmony export */ });
/* harmony import */ var _Articles_vue_vue_type_template_id_1e9bd7fd__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./Articles.vue?vue&type=template&id=1e9bd7fd */ "./resources/js/vue/components/Articles/Articles.vue?vue&type=template&id=1e9bd7fd");
/* harmony import */ var _Articles_vue_vue_type_script_lang_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./Articles.vue?vue&type=script&lang=js */ "./resources/js/vue/components/Articles/Articles.vue?vue&type=script&lang=js");
_Articles_vue_vue_type_script_lang_js__WEBPACK_IMPORTED_MODULE_1__["default"].render = _Articles_vue_vue_type_template_id_1e9bd7fd__WEBPACK_IMPORTED_MODULE_0__.render
/* hot reload */
if (false) {}
_Articles_vue_vue_type_script_lang_js__WEBPACK_IMPORTED_MODULE_1__["default"].__file = "resources/js/vue/components/Articles/Articles.vue"
/* harmony default export */ const __WEBPACK_DEFAULT_EXPORT__ = (_Articles_vue_vue_type_script_lang_js__WEBPACK_IMPORTED_MODULE_1__["default"]);
/***/ }),
/***/ "./resources/js/vue/components/Card/Card.Article.vue":
/*!***********************************************************!*\
!*** ./resources/js/vue/components/Card/Card.Article.vue ***!
\***********************************************************/
/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "default": () => (__WEBPACK_DEFAULT_EXPORT__)
/* harmony export */ });
/* harmony import */ var _Card_Article_vue_vue_type_template_id_08c8c276__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./Card.Article.vue?vue&type=template&id=08c8c276 */ "./resources/js/vue/components/Card/Card.Article.vue?vue&type=template&id=08c8c276");
/* harmony import */ var _Card_Article_vue_vue_type_script_lang_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./Card.Article.vue?vue&type=script&lang=js */ "./resources/js/vue/components/Card/Card.Article.vue?vue&type=script&lang=js");
_Card_Article_vue_vue_type_script_lang_js__WEBPACK_IMPORTED_MODULE_1__["default"].render = _Card_Article_vue_vue_type_template_id_08c8c276__WEBPACK_IMPORTED_MODULE_0__.render
/* hot reload */
if (false) {}
_Card_Article_vue_vue_type_script_lang_js__WEBPACK_IMPORTED_MODULE_1__["default"].__file = "resources/js/vue/components/Card/Card.Article.vue"
/* harmony default export */ const __WEBPACK_DEFAULT_EXPORT__ = (_Card_Article_vue_vue_type_script_lang_js__WEBPACK_IMPORTED_MODULE_1__["default"]);
/***/ }),
/***/ "./resources/js/vue/components/Loader/FlexCircle.Loader.vue":
/*!******************************************************************!*\
!*** ./resources/js/vue/components/Loader/FlexCircle.Loader.vue ***!
\******************************************************************/
/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "default": () => (__WEBPACK_DEFAULT_EXPORT__)
/* harmony export */ });
/* harmony import */ var _FlexCircle_Loader_vue_vue_type_template_id_0b1c155e__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./FlexCircle.Loader.vue?vue&type=template&id=0b1c155e */ "./resources/js/vue/components/Loader/FlexCircle.Loader.vue?vue&type=template&id=0b1c155e");
/* harmony import */ var _FlexCircle_Loader_vue_vue_type_script_lang_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./FlexCircle.Loader.vue?vue&type=script&lang=js */ "./resources/js/vue/components/Loader/FlexCircle.Loader.vue?vue&type=script&lang=js");
_FlexCircle_Loader_vue_vue_type_script_lang_js__WEBPACK_IMPORTED_MODULE_1__["default"].render = _FlexCircle_Loader_vue_vue_type_template_id_0b1c155e__WEBPACK_IMPORTED_MODULE_0__.render
/* hot reload */
if (false) {}
_FlexCircle_Loader_vue_vue_type_script_lang_js__WEBPACK_IMPORTED_MODULE_1__["default"].__file = "resources/js/vue/components/Loader/FlexCircle.Loader.vue"
/* harmony default export */ const __WEBPACK_DEFAULT_EXPORT__ = (_FlexCircle_Loader_vue_vue_type_script_lang_js__WEBPACK_IMPORTED_MODULE_1__["default"]);
/***/ }),
/***/ "./resources/js/vue/views/Articles.vue":
/*!*********************************************!*\
!*** ./resources/js/vue/views/Articles.vue ***!
\*********************************************/
/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "default": () => (__WEBPACK_DEFAULT_EXPORT__)
/* harmony export */ });
/* harmony import */ var _Articles_vue_vue_type_template_id_ee95e7f6__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./Articles.vue?vue&type=template&id=ee95e7f6 */ "./resources/js/vue/views/Articles.vue?vue&type=template&id=ee95e7f6");
/* harmony import */ var _Articles_vue_vue_type_script_lang_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./Articles.vue?vue&type=script&lang=js */ "./resources/js/vue/views/Articles.vue?vue&type=script&lang=js");
_Articles_vue_vue_type_script_lang_js__WEBPACK_IMPORTED_MODULE_1__["default"].render = _Articles_vue_vue_type_template_id_ee95e7f6__WEBPACK_IMPORTED_MODULE_0__.render
/* hot reload */
if (false) {}
_Articles_vue_vue_type_script_lang_js__WEBPACK_IMPORTED_MODULE_1__["default"].__file = "resources/js/vue/views/Articles.vue"
/* harmony default export */ const __WEBPACK_DEFAULT_EXPORT__ = (_Articles_vue_vue_type_script_lang_js__WEBPACK_IMPORTED_MODULE_1__["default"]);
/***/ }),
/***/ "./resources/js/vue/components/Articles/Articles.vue?vue&type=script&lang=js":
/*!***********************************************************************************!*\
!*** ./resources/js/vue/components/Articles/Articles.vue?vue&type=script&lang=js ***!
\***********************************************************************************/
/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "default": () => (/* reexport safe */ _node_modules_babel_loader_lib_index_js_clonedRuleSet_5_use_0_node_modules_vue_loader_dist_index_js_ruleSet_0_use_0_Articles_vue_vue_type_script_lang_js__WEBPACK_IMPORTED_MODULE_0__["default"])
/* harmony export */ });
/* harmony import */ var _node_modules_babel_loader_lib_index_js_clonedRuleSet_5_use_0_node_modules_vue_loader_dist_index_js_ruleSet_0_use_0_Articles_vue_vue_type_script_lang_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! -!../../../../../node_modules/babel-loader/lib/index.js??clonedRuleSet-5.use[0]!../../../../../node_modules/vue-loader/dist/index.js??ruleSet[0].use[0]!./Articles.vue?vue&type=script&lang=js */ "./node_modules/babel-loader/lib/index.js??clonedRuleSet-5.use[0]!./node_modules/vue-loader/dist/index.js??ruleSet[0].use[0]!./resources/js/vue/components/Articles/Articles.vue?vue&type=script&lang=js");
/***/ }),
/***/ "./resources/js/vue/components/Card/Card.Article.vue?vue&type=script&lang=js":
/*!***********************************************************************************!*\
!*** ./resources/js/vue/components/Card/Card.Article.vue?vue&type=script&lang=js ***!
\***********************************************************************************/
/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "default": () => (/* reexport safe */ _node_modules_babel_loader_lib_index_js_clonedRuleSet_5_use_0_node_modules_vue_loader_dist_index_js_ruleSet_0_use_0_Card_Article_vue_vue_type_script_lang_js__WEBPACK_IMPORTED_MODULE_0__["default"])
/* harmony export */ });
/* harmony import */ var _node_modules_babel_loader_lib_index_js_clonedRuleSet_5_use_0_node_modules_vue_loader_dist_index_js_ruleSet_0_use_0_Card_Article_vue_vue_type_script_lang_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! -!../../../../../node_modules/babel-loader/lib/index.js??clonedRuleSet-5.use[0]!../../../../../node_modules/vue-loader/dist/index.js??ruleSet[0].use[0]!./Card.Article.vue?vue&type=script&lang=js */ "./node_modules/babel-loader/lib/index.js??clonedRuleSet-5.use[0]!./node_modules/vue-loader/dist/index.js??ruleSet[0].use[0]!./resources/js/vue/components/Card/Card.Article.vue?vue&type=script&lang=js");
/***/ }),
/***/ "./resources/js/vue/components/Loader/FlexCircle.Loader.vue?vue&type=script&lang=js":
/*!******************************************************************************************!*\
!*** ./resources/js/vue/components/Loader/FlexCircle.Loader.vue?vue&type=script&lang=js ***!
\******************************************************************************************/
/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "default": () => (/* reexport safe */ _node_modules_babel_loader_lib_index_js_clonedRuleSet_5_use_0_node_modules_vue_loader_dist_index_js_ruleSet_0_use_0_FlexCircle_Loader_vue_vue_type_script_lang_js__WEBPACK_IMPORTED_MODULE_0__["default"])
/* harmony export */ });
/* harmony import */ var _node_modules_babel_loader_lib_index_js_clonedRuleSet_5_use_0_node_modules_vue_loader_dist_index_js_ruleSet_0_use_0_FlexCircle_Loader_vue_vue_type_script_lang_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! -!../../../../../node_modules/babel-loader/lib/index.js??clonedRuleSet-5.use[0]!../../../../../node_modules/vue-loader/dist/index.js??ruleSet[0].use[0]!./FlexCircle.Loader.vue?vue&type=script&lang=js */ "./node_modules/babel-loader/lib/index.js??clonedRuleSet-5.use[0]!./node_modules/vue-loader/dist/index.js??ruleSet[0].use[0]!./resources/js/vue/components/Loader/FlexCircle.Loader.vue?vue&type=script&lang=js");
/***/ }),
/***/ "./resources/js/vue/views/Articles.vue?vue&type=script&lang=js":
/*!*********************************************************************!*\
!*** ./resources/js/vue/views/Articles.vue?vue&type=script&lang=js ***!
\*********************************************************************/
/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "default": () => (/* reexport safe */ _node_modules_babel_loader_lib_index_js_clonedRuleSet_5_use_0_node_modules_vue_loader_dist_index_js_ruleSet_0_use_0_Articles_vue_vue_type_script_lang_js__WEBPACK_IMPORTED_MODULE_0__["default"])
/* harmony export */ });
/* harmony import */ var _node_modules_babel_loader_lib_index_js_clonedRuleSet_5_use_0_node_modules_vue_loader_dist_index_js_ruleSet_0_use_0_Articles_vue_vue_type_script_lang_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! -!../../../../node_modules/babel-loader/lib/index.js??clonedRuleSet-5.use[0]!../../../../node_modules/vue-loader/dist/index.js??ruleSet[0].use[0]!./Articles.vue?vue&type=script&lang=js */ "./node_modules/babel-loader/lib/index.js??clonedRuleSet-5.use[0]!./node_modules/vue-loader/dist/index.js??ruleSet[0].use[0]!./resources/js/vue/views/Articles.vue?vue&type=script&lang=js");
/***/ }),
/***/ "./resources/js/vue/components/Articles/Articles.vue?vue&type=template&id=1e9bd7fd":
/*!*****************************************************************************************!*\
!*** ./resources/js/vue/components/Articles/Articles.vue?vue&type=template&id=1e9bd7fd ***!
\*****************************************************************************************/
/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "render": () => (/* reexport safe */ _node_modules_babel_loader_lib_index_js_clonedRuleSet_5_use_0_node_modules_vue_loader_dist_templateLoader_js_ruleSet_1_rules_2_node_modules_vue_loader_dist_index_js_ruleSet_0_use_0_Articles_vue_vue_type_template_id_1e9bd7fd__WEBPACK_IMPORTED_MODULE_0__.render)
/* harmony export */ });
/* harmony import */ var _node_modules_babel_loader_lib_index_js_clonedRuleSet_5_use_0_node_modules_vue_loader_dist_templateLoader_js_ruleSet_1_rules_2_node_modules_vue_loader_dist_index_js_ruleSet_0_use_0_Articles_vue_vue_type_template_id_1e9bd7fd__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! -!../../../../../node_modules/babel-loader/lib/index.js??clonedRuleSet-5.use[0]!../../../../../node_modules/vue-loader/dist/templateLoader.js??ruleSet[1].rules[2]!../../../../../node_modules/vue-loader/dist/index.js??ruleSet[0].use[0]!./Articles.vue?vue&type=template&id=1e9bd7fd */ "./node_modules/babel-loader/lib/index.js??clonedRuleSet-5.use[0]!./node_modules/vue-loader/dist/templateLoader.js??ruleSet[1].rules[2]!./node_modules/vue-loader/dist/index.js??ruleSet[0].use[0]!./resources/js/vue/components/Articles/Articles.vue?vue&type=template&id=1e9bd7fd");
/***/ }),
/***/ "./resources/js/vue/components/Card/Card.Article.vue?vue&type=template&id=08c8c276":
/*!*****************************************************************************************!*\
!*** ./resources/js/vue/components/Card/Card.Article.vue?vue&type=template&id=08c8c276 ***!
\*****************************************************************************************/
/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "render": () => (/* reexport safe */ _node_modules_babel_loader_lib_index_js_clonedRuleSet_5_use_0_node_modules_vue_loader_dist_templateLoader_js_ruleSet_1_rules_2_node_modules_vue_loader_dist_index_js_ruleSet_0_use_0_Card_Article_vue_vue_type_template_id_08c8c276__WEBPACK_IMPORTED_MODULE_0__.render)
/* harmony export */ });
/* harmony import */ var _node_modules_babel_loader_lib_index_js_clonedRuleSet_5_use_0_node_modules_vue_loader_dist_templateLoader_js_ruleSet_1_rules_2_node_modules_vue_loader_dist_index_js_ruleSet_0_use_0_Card_Article_vue_vue_type_template_id_08c8c276__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! -!../../../../../node_modules/babel-loader/lib/index.js??clonedRuleSet-5.use[0]!../../../../../node_modules/vue-loader/dist/templateLoader.js??ruleSet[1].rules[2]!../../../../../node_modules/vue-loader/dist/index.js??ruleSet[0].use[0]!./Card.Article.vue?vue&type=template&id=08c8c276 */ "./node_modules/babel-loader/lib/index.js??clonedRuleSet-5.use[0]!./node_modules/vue-loader/dist/templateLoader.js??ruleSet[1].rules[2]!./node_modules/vue-loader/dist/index.js??ruleSet[0].use[0]!./resources/js/vue/components/Card/Card.Article.vue?vue&type=template&id=08c8c276");
/***/ }),
/***/ "./resources/js/vue/components/Loader/FlexCircle.Loader.vue?vue&type=template&id=0b1c155e":
/*!************************************************************************************************!*\
!*** ./resources/js/vue/components/Loader/FlexCircle.Loader.vue?vue&type=template&id=0b1c155e ***!
\************************************************************************************************/
/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "render": () => (/* reexport safe */ _node_modules_babel_loader_lib_index_js_clonedRuleSet_5_use_0_node_modules_vue_loader_dist_templateLoader_js_ruleSet_1_rules_2_node_modules_vue_loader_dist_index_js_ruleSet_0_use_0_FlexCircle_Loader_vue_vue_type_template_id_0b1c155e__WEBPACK_IMPORTED_MODULE_0__.render)
/* harmony export */ });
/* harmony import */ var _node_modules_babel_loader_lib_index_js_clonedRuleSet_5_use_0_node_modules_vue_loader_dist_templateLoader_js_ruleSet_1_rules_2_node_modules_vue_loader_dist_index_js_ruleSet_0_use_0_FlexCircle_Loader_vue_vue_type_template_id_0b1c155e__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! -!../../../../../node_modules/babel-loader/lib/index.js??clonedRuleSet-5.use[0]!../../../../../node_modules/vue-loader/dist/templateLoader.js??ruleSet[1].rules[2]!../../../../../node_modules/vue-loader/dist/index.js??ruleSet[0].use[0]!./FlexCircle.Loader.vue?vue&type=template&id=0b1c155e */ "./node_modules/babel-loader/lib/index.js??clonedRuleSet-5.use[0]!./node_modules/vue-loader/dist/templateLoader.js??ruleSet[1].rules[2]!./node_modules/vue-loader/dist/index.js??ruleSet[0].use[0]!./resources/js/vue/components/Loader/FlexCircle.Loader.vue?vue&type=template&id=0b1c155e");
/***/ }),
/***/ "./resources/js/vue/views/Articles.vue?vue&type=template&id=ee95e7f6":
/*!***************************************************************************!*\
!*** ./resources/js/vue/views/Articles.vue?vue&type=template&id=ee95e7f6 ***!
\***************************************************************************/
/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "render": () => (/* reexport safe */ _node_modules_babel_loader_lib_index_js_clonedRuleSet_5_use_0_node_modules_vue_loader_dist_templateLoader_js_ruleSet_1_rules_2_node_modules_vue_loader_dist_index_js_ruleSet_0_use_0_Articles_vue_vue_type_template_id_ee95e7f6__WEBPACK_IMPORTED_MODULE_0__.render)
/* harmony export */ });
/* harmony import */ var _node_modules_babel_loader_lib_index_js_clonedRuleSet_5_use_0_node_modules_vue_loader_dist_templateLoader_js_ruleSet_1_rules_2_node_modules_vue_loader_dist_index_js_ruleSet_0_use_0_Articles_vue_vue_type_template_id_ee95e7f6__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! -!../../../../node_modules/babel-loader/lib/index.js??clonedRuleSet-5.use[0]!../../../../node_modules/vue-loader/dist/templateLoader.js??ruleSet[1].rules[2]!../../../../node_modules/vue-loader/dist/index.js??ruleSet[0].use[0]!./Articles.vue?vue&type=template&id=ee95e7f6 */ "./node_modules/babel-loader/lib/index.js??clonedRuleSet-5.use[0]!./node_modules/vue-loader/dist/templateLoader.js??ruleSet[1].rules[2]!./node_modules/vue-loader/dist/index.js??ruleSet[0].use[0]!./resources/js/vue/views/Articles.vue?vue&type=template&id=ee95e7f6");
/***/ })
}]);
|
_next
|
add_secret.go
|
package controller
import (
"github.com/choerodon/choerodon-cluster-agent/pkg/controller/secret"
)
func
|
() {
// AddToManagerFuncs is a list of functions to create controllers and add them to a manager.
AddToManagerFuncs = append(AddToManagerFuncs, secret.Add)
}
|
init
|
q.py
|
from queue import Queue
from threading import Thread
from time import sleep
from bripy.bllb.logging import logger, DBG
def unloadq(q, stop, limit=2000, rest=.1, check=100):
i = limit
loops = 0
results = []
while True and ((i and not stop()) or q.qsize()):
loops += 1
if loops % check == 0:
DBG(i, loops, len(results))
if q.qsize():
x = q.get()
DBG(x)
results.append(x)
i = min(i + 1, limit)
else:
i -= 1
if i % check == 0:
DBG(i)
sleep(rest)
return results
def multiplex(n, q, **kwargs):
""" Convert one queue into several equivalent Queues
>>> q1, q2, q3 = multiplex(3, in_q)
"""
out_queues = [Queue(**kwargs) for i in range(n)]
def
|
():
while True:
x = q.get()
for out_q in out_queues:
out_q.put(x)
t = Thread(target=f)
t.daemon = True
t.start()
return out_queues
def push(in_q, out_q):
while True:
x = in_q.get()
out_q.put(x)
def merge(*in_qs, **kwargs):
""" Merge multiple queues together
>>> out_q = merge(q1, q2, q3)
"""
out_q = Queue(**kwargs)
threads = [Thread(target=push, args=(q, out_q)) for q in in_qs]
for t in threads:
t.daemon = True
t.start()
return out_q
def iterq(q):
while q.qsize():
yield q.get()
def get_q(q):
results = []
while not q.empty() or q.qsize():
item = q.get()
if item == 'STOP':
DBG('STOP get_q')
q.task_done()
break
DBG(item)
if item:
results.append(item)
q.task_done()
return results
|
f
|
encode.rs
|
// Copyright (C) 2020 - 2022, J2 Innovations
//! Implement Zinc encoding
use crate::haystack::val::{
Bool, Column, Coord, Date, DateTime, Dict, Grid, List, Marker, Na, Number, Ref, Remove, Str,
Symbol, Time, Uri, Value, XStr,
};
use chrono::SecondsFormat;
use std::fmt::Display;
/// Zinc encoding version
pub const VER: f32 = 3.0;
/// Zinc encoding trait implemented by scalar and collection types
pub trait ToZinc {
fn to_zinc<W: std::io::Write>(&self, writer: &mut W) -> Result<()>;
/// Encodes this Haystack type as a Zinc string
///
/// # Example
/// ```
/// use libhaystack::val::*;
/// use libhaystack::encoding::zinc::encode::*;
/// use libhaystack::units::get_unit_or_default;
/// let val = Number::make_with_unit(100.0, get_unit_or_default("s"));
/// assert_eq!(val.to_zinc_string(), Ok("100s".to_string()));
/// ```
fn to_zinc_string(&self) -> Result<String> {
let mut output = Vec::new();
self.to_zinc(&mut output)?;
Ok(String::from_utf8(output)?)
}
}
/// Function that take a haystack [Value](crate::val::Value)
/// and returns its Zinc string encoding
///
/// # Example
/// ```
/// use libhaystack::val::*;
/// use libhaystack::encoding::zinc::encode::*;
/// let val = Value::make_true();
/// assert_eq!(to_zinc_string(&val), Ok("T".to_string()));
///
pub fn to_zinc_string(value: &Value) -> Result<String> {
let mut output = Vec::new();
value.to_zinc(&mut output)?;
Ok(String::from_utf8(output)?)
}
#[derive(PartialEq, PartialOrd, Clone, Debug)]
enum InnerGrid {
Yes,
No,
}
/// Specialized trait for encoding inner Grids
trait ZincEncode: ToZinc {
fn zinc_encode<W: std::io::Write>(&self, writer: &mut W, in_grid: InnerGrid) -> Result<()>;
}
pub type Result<T> = std::result::Result<T, Error>;
#[derive(Clone, Debug, PartialEq)]
pub enum Error {
Message(String),
}
impl Display for Error {
fn fmt(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
match self {
Error::Message(msg) => formatter.write_str(msg),
}
}
}
impl std::error::Error for Error {}
impl From<std::fmt::Error> for Error {
fn from(_: std::fmt::Error) -> Self {
Error::from("Format error.")
}
}
impl From<std::io::Error> for Error {
fn from(_: std::io::Error) -> Self {
Error::from("IO error.")
}
}
impl From<&str> for Error {
fn from(msg: &str) -> Self {
Error::Message(String::from(msg))
}
}
impl From<std::string::FromUtf8Error> for Error {
fn from(_: std::string::FromUtf8Error) -> Self {
Error::from("Utf8 encoding error.")
}
}
fn write_str<W: std::io::Write>(writer: &mut W, s: &str) -> Result<()> {
let bytes = s.as_bytes();
writer.write_all(bytes)?;
Ok(())
}
impl ToZinc for Marker {
fn to_zinc<W: std::io::Write>(&self, writer: &mut W) -> Result<()> {
writer.write_all(b"M")?;
Ok(())
}
}
impl ToZinc for Remove {
fn to_zinc<W: std::io::Write>(&self, writer: &mut W) -> Result<()> {
writer.write_all(b"R")?;
Ok(())
}
}
impl ToZinc for Na {
fn to_zinc<W: std::io::Write>(&self, writer: &mut W) -> Result<()> {
writer.write_all(b"NA")?;
Ok(())
}
}
impl ToZinc for Bool {
fn to_zinc<W: std::io::Write>(&self, writer: &mut W) -> Result<()> {
if self.value {
writer.write_all(b"T")?
} else {
writer.write_all(b"F")?
}
Ok(())
}
}
impl ToZinc for Number {
fn to_zinc<W: std::io::Write>(&self, writer: &mut W) -> Result<()> {
if self.value.is_nan() {
writer.write_all(b"NaN")?
} else if self.value.is_infinite() {
let sign = if self.value.is_sign_negative() {
"-"
} else {
""
};
writer.write_fmt(format_args!("{}INF", sign))?
} else if let Some(unit) = &self.unit {
writer.write_fmt(format_args!("{}{}", self.value, unit))?
} else {
writer.write_fmt(format_args!("{}", self.value))?
}
Ok(())
}
}
impl ToZinc for Date {
fn to_zinc<W: std::io::Write>(&self, writer: &mut W) -> Result<()> {
write_str(writer, &self.to_string())?;
Ok(())
}
}
impl ToZinc for Time {
fn to_zinc<W: std::io::Write>(&self, writer: &mut W) -> Result<()> {
write_str(writer, &self.to_string())?;
Ok(())
}
}
impl ToZinc for DateTime {
fn to_zinc<W: std::io::Write>(&self, writer: &mut W) -> Result<()> {
if self.is_utc() {
write_str(writer, &self.to_rfc3339_opts(SecondsFormat::AutoSi, true))?;
} else {
writer.write_fmt(format_args!(
"{} {}",
&self.to_rfc3339_opts(SecondsFormat::AutoSi, true),
&self.timezone_short_name()
))?
}
Ok(())
}
}
impl ToZinc for Str {
fn to_zinc<W: std::io::Write>(&self, writer: &mut W) -> Result<()> {
writer.write_all(b"\"")?;
let mut buf = [0; 4];
for c in self.value.chars() {
if c < ' ' || c == '"' || c == '\\' {
match c {
'"' => writer.write_all(br#"\""#)?,
'\t' => writer.write_all(br#"\t"#)?,
'\r' => writer.write_all(br#"\r"#)?,
'\n' => writer.write_all(br#"\n"#)?,
'\\' => writer.write_all(br#"\\"#)?,
_ => writer.write_fmt(format_args!("\\u{:04x}", c as u32))?,
}
} else if c == '$' {
writer.write_all(br#"\$"#)?
} else {
let chunk = c.encode_utf8(&mut buf);
writer.write_fmt(format_args!("{}", chunk))?
}
}
writer.write_all(b"\"")?;
Ok(())
}
}
impl ToZinc for Ref {
fn to_zinc<W: std::io::Write>(&self, writer: &mut W) -> Result<()>
|
}
impl ToZinc for Symbol {
fn to_zinc<W: std::io::Write>(&self, writer: &mut W) -> Result<()> {
writer.write_fmt(format_args!("^{}", self.value))?;
Ok(())
}
}
impl ToZinc for Uri {
fn to_zinc<W: std::io::Write>(&self, writer: &mut W) -> Result<()> {
writer.write_all(b"`")?;
for c in self.value.chars() {
if c < ' ' {
continue;
}
match c {
'`' => writer.write_all(br#"\`"#)?,
'\\' => writer.write_all(br#"\\"#)?,
'\x20'..='\x7e' => writer.write_all(&[c as u8])?,
_ => writer.write_fmt(format_args!("\\u{:04x}", c as u32))?,
}
}
writer.write_all(b"`")?;
Ok(())
}
}
impl ToZinc for XStr {
fn to_zinc<W: std::io::Write>(&self, writer: &mut W) -> Result<()> {
writer.write_fmt(format_args!(
"{}{}(\"{}\")",
self.r#type[0..1].to_uppercase(),
&self.r#type[1..],
self.value
))?;
Ok(())
}
}
impl ToZinc for Coord {
fn to_zinc<W: std::io::Write>(&self, writer: &mut W) -> Result<()> {
writer.write_fmt(format_args!("C({},{})", self.lat, self.long))?;
Ok(())
}
}
impl ToZinc for Grid {
fn to_zinc<W: std::io::Write>(&self, writer: &mut W) -> Result<()> {
self.zinc_encode(writer, InnerGrid::No)
}
}
impl ZincEncode for Grid {
fn zinc_encode<W: std::io::Write>(&self, writer: &mut W, in_grid: InnerGrid) -> Result<()> {
if in_grid == InnerGrid::Yes {
writer.write_all(b"<<\n")?;
}
writer.write_fmt(format_args!("ver:\"{:.1}\"\n", VER))?;
// Grid meta
if let Some(meta) = &self.meta {
write_dict_tags(writer, meta, b" ")?;
}
if self.is_empty() {
// No rows to be written
writer.write_all(b"empty\n")?;
} else {
// Columns
for (i, col) in self.columns.iter().enumerate() {
col.to_zinc(writer)?;
if i < self.columns.len() - 1 {
writer.write_all(b",")?;
}
}
writer.write_all(b"\n")?;
// Rows
for row in &self.rows {
// Tags
for (i, col) in self.columns.iter().enumerate() {
if let Some(tag) = row.get(&col.name) {
tag.zinc_encode(writer, InnerGrid::Yes)?;
}
if i < self.columns.len() - 1 {
writer.write_all(b",")?;
}
}
writer.write_all(b"\n")?;
}
}
if in_grid == InnerGrid::Yes {
writer.write_all(b">>")?;
} else {
writer.write_all(b"\n")?;
}
Ok(())
}
}
impl ToZinc for List {
fn to_zinc<W: std::io::Write>(&self, writer: &mut W) -> Result<()> {
writer.write_all(b"[")?;
for (i, el) in self.iter().enumerate() {
el.zinc_encode(writer, InnerGrid::Yes)?;
if i < self.len() - 1 {
writer.write_all(b",")?;
}
}
writer.write_all(b"]")?;
Ok(())
}
}
impl ToZinc for Dict {
fn to_zinc<W: std::io::Write>(&self, writer: &mut W) -> Result<()> {
writer.write_all(b"{")?;
write_dict_tags(writer, self, b",")?;
writer.write_all(b"}")?;
Ok(())
}
}
/// Implement the Zinc encoding for Haystack Value type
impl ToZinc for Value {
fn to_zinc<W: std::io::Write>(&self, writer: &mut W) -> Result<()> {
self.zinc_encode(writer, InnerGrid::No)
}
}
/// Implements `ZincEncode` for the `Value` type
/// This implementation deals with nested grids.
impl ZincEncode for Value {
fn zinc_encode<W: std::io::Write>(&self, writer: &mut W, in_grid: InnerGrid) -> Result<()> {
match self {
Value::Null => writer.write_all(b"N")?,
Value::Remove => Remove.to_zinc(writer)?,
Value::Marker => Marker.to_zinc(writer)?,
Value::Bool(val) => val.to_zinc(writer)?,
Value::Na => Na.to_zinc(writer)?,
Value::Number(val) => val.to_zinc(writer)?,
Value::Str(val) => val.to_zinc(writer)?,
Value::Ref(val) => val.to_zinc(writer)?,
Value::Uri(val) => val.to_zinc(writer)?,
Value::Symbol(val) => val.to_zinc(writer)?,
Value::Date(val) => val.to_zinc(writer)?,
Value::Time(val) => val.to_zinc(writer)?,
Value::DateTime(val) => val.to_zinc(writer)?,
Value::Coord(val) => val.to_zinc(writer)?,
Value::XStr(val) => val.to_zinc(writer)?,
Value::List(val) => val.to_zinc(writer)?,
Value::Dict(val) => val.to_zinc(writer)?,
Value::Grid(val) => val.zinc_encode(writer, in_grid)?,
}
Ok(())
}
}
/// Serialize a Grid column
impl ToZinc for Column {
fn to_zinc<W: std::io::Write>(&self, writer: &mut W) -> Result<()> {
write_str(writer, &self.name)?;
if let Some(meta) = &self.meta {
write_dict_tags(writer, meta, b" ")?;
}
Ok(())
}
}
fn write_dict_tags<W: std::io::Write>(
writer: &mut W,
dict: &Dict,
separator: &[u8; 1],
) -> Result<()> {
for (pos, (k, v)) in dict.iter().enumerate() {
write_str(writer, k)?;
if !v.is_marker() {
writer.write_all(b":")?;
v.zinc_encode(writer, InnerGrid::Yes)?;
}
if pos < dict.len() - 1 {
writer.write_all(separator)?;
}
}
Ok(())
}
|
{
if let Some(dis) = &self.dis {
writer.write_fmt(format_args!("@{} \"{}\"", self.value, dis))?
} else {
writer.write_fmt(format_args!("@{}", self.value))?
}
Ok(())
}
|
estimote.py
|
from construct import Struct, Byte, Switch, Int8sl, Array, Int8ul
from ..const import ESTIMOTE_TELEMETRY_SUBFRAME_A, ESTIMOTE_TELEMETRY_SUBFRAME_B
# pylint: disable=invalid-name
EstimoteTelemetrySubFrameA = Struct(
"acceleration" / Array(3, Int8sl),
"previous_motion" / Byte,
"current_motion" / Byte,
"combined_fields" / Array(5, Byte),
)
EstimoteTelemetrySubFrameB = Struct(
"magnetic_field" / Array(3, Int8sl),
"ambient_light" / Int8ul,
"combined_fields" / Array(5, Byte),
"battery_level" / Int8ul,
)
EstimoteTelemetryFrame = Struct(
"identifier" / Array(8, Byte),
"subframe_type" / Byte,
"sub_frame" / Switch(lambda ctx: ctx.subframe_type, {
ESTIMOTE_TELEMETRY_SUBFRAME_A: EstimoteTelemetrySubFrameA,
ESTIMOTE_TELEMETRY_SUBFRAME_B: EstimoteTelemetrySubFrameB,
})
)
|
"""All low level structures used for parsing Estimote packets."""
|
|
glog_test.go
|
// Go support for leveled logs, analogous to https://code.google.com/p/google-glog/
//
// Copyright 2013 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package cglog
import (
"bytes"
"fmt"
stdLog "log"
"path/filepath"
"runtime"
"strconv"
"strings"
"testing"
"time"
)
// Test that shortHostname works as advertised.
func TestShortHostname(t *testing.T) {
for hostname, expect := range map[string]string{
"": "",
"host": "host",
"host.google.com": "host",
} {
if got := shortHostname(hostname); expect != got {
t.Errorf("shortHostname(%q): expected %q, got %q", hostname, expect, got)
}
}
}
// flushBuffer wraps a bytes.Buffer to satisfy flushSyncWriter.
type flushBuffer struct {
bytes.Buffer
}
func (f *flushBuffer) Flush() error {
return nil
}
func (f *flushBuffer) Sync() error {
return nil
}
// swap sets the log writers and returns the old array.
func (l *loggingT) swap(writers [numSeverity]flushSyncWriter) (old [numSeverity]flushSyncWriter) {
l.mu.Lock()
defer l.mu.Unlock()
old = l.file
for i, w := range writers {
logging.file[i] = w
}
return
}
// newBuffers sets the log writers to all new byte buffers and returns the old array.
func (l *loggingT) newBuffers() [numSeverity]flushSyncWriter {
return l.swap([numSeverity]flushSyncWriter{new(flushBuffer), new(flushBuffer), new(flushBuffer), new(flushBuffer)})
}
// contents returns the specified log value as a string.
func contents(s severity) string {
return logging.file[s].(*flushBuffer).String()
}
// contains reports whether the string is contained in the log.
func contains(s severity, str string, t *testing.T) bool {
return strings.Contains(contents(s), str)
}
// setFlags configures the logging flags how the test expects them.
func setFlags() {
logging.toStderr = false
}
// Test that Info works as advertised.
func TestInfo(t *testing.T) {
setFlags()
defer logging.swap(logging.newBuffers())
Info("test")
if !contains(infoLog, "I", t) {
t.Errorf("Info has wrong character: %q", contents(infoLog))
}
if !contains(infoLog, "test", t) {
t.Error("Info failed")
}
}
func TestInfoDepth(t *testing.T) {
setFlags()
defer logging.swap(logging.newBuffers())
f := func() { InfoDepth(1, "depth-test1") }
// The next three lines must stay together
_, _, wantLine, _ := runtime.Caller(0)
InfoDepth(0, "depth-test0")
f()
msgs := strings.Split(strings.TrimSuffix(contents(infoLog), "\n"), "\n")
if len(msgs) != 2 {
t.Fatalf("Got %d lines, expected 2", len(msgs))
}
for i, m := range msgs {
if !strings.HasPrefix(m, "I") {
t.Errorf("InfoDepth[%d] has wrong character: %q", i, m)
}
w := fmt.Sprintf("depth-test%d", i)
if !strings.Contains(m, w) {
t.Errorf("InfoDepth[%d] missing %q: %q", i, w, m)
}
// pull out the line number (between : and ])
msg := m[strings.LastIndex(m, ":")+1:]
x := strings.Index(msg, "]")
if x < 0 {
t.Errorf("InfoDepth[%d]: missing ']': %q", i, m)
continue
}
line, err := strconv.Atoi(msg[:x])
if err != nil {
t.Errorf("InfoDepth[%d]: bad line number: %q", i, m)
continue
}
wantLine++
if wantLine != line {
t.Errorf("InfoDepth[%d]: got line %d, want %d", i, line, wantLine)
}
}
}
func init() {
CopyStandardLogTo("INFO")
}
// Test that CopyStandardLogTo panics on bad input.
func TestCopyStandardLogToPanic(t *testing.T) {
defer func() {
if s, ok := recover().(string); !ok || !strings.Contains(s, "LOG") {
t.Errorf(`CopyStandardLogTo("LOG") should have panicked: %v`, s)
}
}()
CopyStandardLogTo("LOG")
}
// Test that using the standard log package logs to INFO.
func TestStandardLog(t *testing.T) {
setFlags()
defer logging.swap(logging.newBuffers())
stdLog.Print("test")
if !contains(infoLog, "I", t) {
t.Errorf("Info has wrong character: %q", contents(infoLog))
}
if !contains(infoLog, "test", t) {
t.Error("Info failed")
}
}
// Test that the header has the correct format.
func TestHeader(t *testing.T) {
setFlags()
defer logging.swap(logging.newBuffers())
defer func(previous func() time.Time) { timeNow = previous }(timeNow)
timeNow = func() time.Time {
return time.Date(2006, 1, 2, 15, 4, 5, .067890e9, time.Local)
}
pid = 1234
Info("test")
var line int
format := "I0102 15:04:05.067890 1234 glog_test.go:%d] test\n"
n, err := fmt.Sscanf(contents(infoLog), format, &line)
if n != 1 || err != nil {
t.Errorf("log format error: %d elements, error %s:\n%s", n, err, contents(infoLog))
}
// Scanf treats multiple spaces as equivalent to a single space,
// so check for correct space-padding also.
want := fmt.Sprintf(format, line)
if contents(infoLog) != want {
t.Errorf("log format error: got:\n\t%q\nwant:\t%q", contents(infoLog), want)
}
}
// Test that an Error log goes to Warning and Info.
// Even in the Info log, the source character will be E, so the data should
// all be identical.
func TestError(t *testing.T) {
setFlags()
defer logging.swap(logging.newBuffers())
Error("test")
if !contains(errorLog, "E", t) {
t.Errorf("Error has wrong character: %q", contents(errorLog))
}
if !contains(errorLog, "test", t) {
t.Error("Error failed")
}
str := contents(errorLog)
if !contains(warningLog, str, t) {
t.Error("Warning failed")
}
if !contains(infoLog, str, t) {
t.Error("Info failed")
}
}
// Test that a Warning log goes to Info.
// Even in the Info log, the source character will be W, so the data should
// all be identical.
func TestWarning(t *testing.T) {
setFlags()
defer logging.swap(logging.newBuffers())
Warning("test")
if !contains(warningLog, "W", t) {
t.Errorf("Warning has wrong character: %q", contents(warningLog))
}
if !contains(warningLog, "test", t) {
t.Error("Warning failed")
}
str := contents(warningLog)
if !contains(infoLog, str, t) {
t.Error("Info failed")
}
}
// Test that a V log goes to Info.
func TestV(t *testing.T) {
setFlags()
defer logging.swap(logging.newBuffers())
logging.verbosity.Set("2")
defer logging.verbosity.Set("0")
V(2).Info("test")
if !contains(infoLog, "I", t) {
t.Errorf("Info has wrong character: %q", contents(infoLog))
}
if !contains(infoLog, "test", t) {
t.Error("Info failed")
}
}
// Test that a vmodule enables a log in this file.
func
|
(t *testing.T) {
setFlags()
defer logging.swap(logging.newBuffers())
logging.vmodule.Set("glog_test=2")
defer logging.vmodule.Set("")
if !V(1) {
t.Error("V not enabled for 1")
}
if !V(2) {
t.Error("V not enabled for 2")
}
if V(3) {
t.Error("V enabled for 3")
}
V(2).Info("test")
if !contains(infoLog, "I", t) {
t.Errorf("Info has wrong character: %q", contents(infoLog))
}
if !contains(infoLog, "test", t) {
t.Error("Info failed")
}
}
// Test that a vmodule of another file does not enable a log in this file.
func TestVmoduleOff(t *testing.T) {
setFlags()
defer logging.swap(logging.newBuffers())
logging.vmodule.Set("notthisfile=2")
defer logging.vmodule.Set("")
for i := 1; i <= 3; i++ {
if V(Level(i)) {
t.Errorf("V enabled for %d", i)
}
}
V(2).Info("test")
if contents(infoLog) != "" {
t.Error("V logged incorrectly")
}
}
// vGlobs are patterns that match/don't match this file at V=2.
var vGlobs = map[string]bool{
// Easy to test the numeric match here.
"glog_test=1": false, // If -vmodule sets V to 1, V(2) will fail.
"glog_test=2": true,
"glog_test=3": true, // If -vmodule sets V to 1, V(3) will succeed.
// These all use 2 and check the patterns. All are true.
"*=2": true,
"?l*=2": true,
"????_*=2": true,
"??[mno]?_*t=2": true,
// These all use 2 and check the patterns. All are false.
"*x=2": false,
"m*=2": false,
"??_*=2": false,
"?[abc]?_*t=2": false,
}
// Test that vmodule globbing works as advertised.
func testVmoduleGlob(pat string, match bool, t *testing.T) {
setFlags()
defer logging.swap(logging.newBuffers())
defer logging.vmodule.Set("")
logging.vmodule.Set(pat)
if V(2) != Verbose(match) {
t.Errorf("incorrect match for %q: got %t expected %t", pat, V(2), match)
}
}
// Test that a vmodule globbing works as advertised.
func TestVmoduleGlob(t *testing.T) {
for glob, match := range vGlobs {
testVmoduleGlob(glob, match, t)
}
}
func TestRollover(t *testing.T) {
setFlags()
var err error
defer func(previous func(error)) { logExitFunc = previous }(logExitFunc)
logExitFunc = func(e error) {
err = e
}
defer func(previous uint64) { MaxSize = previous }(MaxSize)
MaxSize = 512
Info("x") // Be sure we have a file.
info, ok := logging.file[infoLog].(*syncBuffer)
if !ok {
t.Fatal("info wasn't created")
}
if err != nil {
t.Fatalf("info has initial error: %v", err)
}
fname0 := info.file.Name()
Info(strings.Repeat("x", int(MaxSize))) // force a rollover
if err != nil {
t.Fatalf("info has error after big write: %v", err)
}
// Make sure the next log file gets a file name with a different
// time stamp.
//
// TODO: determine whether we need to support subsecond log
// rotation. C++ does not appear to handle this case (nor does it
// handle Daylight Savings Time properly).
time.Sleep(1 * time.Second)
Info("x") // create a new file
if err != nil {
t.Fatalf("error after rotation: %v", err)
}
fname1 := info.file.Name()
if fname0 == fname1 {
t.Errorf("info.f.Name did not change: %v", fname0)
}
if info.nbytes >= MaxSize {
t.Errorf("file size was not reset: %d", info.nbytes)
}
}
func TestLogBacktraceAt(t *testing.T) {
setFlags()
defer logging.swap(logging.newBuffers())
// The peculiar style of this code simplifies line counting and maintenance of the
// tracing block below.
var infoLine string
setTraceLocation := func(file string, line int, ok bool, delta int) {
if !ok {
t.Fatal("could not get file:line")
}
_, file = filepath.Split(file)
infoLine = fmt.Sprintf("%s:%d", file, line+delta)
err := logging.traceLocation.Set(infoLine)
if err != nil {
t.Fatal("error setting log_backtrace_at: ", err)
}
}
{
// Start of tracing block. These lines know about each other's relative position.
_, file, line, ok := runtime.Caller(0)
setTraceLocation(file, line, ok, +2) // Two lines between Caller and Info calls.
Info("we want a stack trace here")
}
numAppearances := strings.Count(contents(infoLog), infoLine)
if numAppearances < 2 {
// Need 2 appearances, one in the log header and one in the trace:
// log_test.go:281: I0511 16:36:06.952398 02238 log_test.go:280] we want a stack trace here
// ...
// github.com/glog/glog_test.go:280 (0x41ba91)
// ...
// We could be more precise but that would require knowing the details
// of the traceback format, which may not be dependable.
t.Fatal("got no trace back; log is ", contents(infoLog))
}
}
func BenchmarkHeader(b *testing.B) {
for i := 0; i < b.N; i++ {
buf, _, _ := logging.header(infoLog, 0)
logging.putBuffer(buf)
}
}
|
TestVmoduleOn
|
model.rs
|
use std::collections::{HashMap, VecDeque};
use std::fs::File;
use std::io::BufReader;
use std::io::{Read, Seek};
use std::path::{Path, PathBuf};
use std::time::{Duration, Instant};
use std::{cell::RefCell, rc::Rc};
use anyhow::{Context, Result};
use cpal::traits::{DeviceTrait, HostTrait};
use log::{debug, error, info, trace};
use rodio::{source::Source, Sample};
use pandora_api::json::{station::PlaylistTrack, user::Station};
use crate::caching;
use crate::config::{CachePolicy, Config, PartialConfig};
use crate::errors::Error;
use crate::pandora::PandoraSession;
pub(crate) trait StateMediator {
fn disconnected(&self) -> bool;
fn disconnect(&mut self);
fn fail_authentication(&mut self);
fn connected(&self) -> bool;
fn connect(&mut self);
fn tuned(&self) -> Option<String>;
fn tune(&mut self, station_id: String);
fn untune(&mut self);
fn ready(&self) -> bool;
fn playing(&self) -> Option<PlaylistTrack>;
fn update(&mut self) -> bool;
fn quitting(&self) -> bool;
fn quit(&mut self);
}
pub(crate) trait StationMediator {
fn fill_station_list(&mut self);
fn station_list(&self) -> Vec<(String, String)>;
fn station_count(&self) -> usize;
}
pub(crate) trait PlaybackMediator {
fn stopped(&self) -> bool;
fn stop(&mut self);
fn started(&self) -> bool;
fn start(&mut self);
fn elapsed(&self) -> Duration;
fn duration(&self) -> Duration;
}
pub(crate) trait AudioMediator {
fn reset(&mut self);
fn active(&self) -> bool;
fn paused(&self) -> bool;
fn pause(&mut self);
fn unpause(&mut self);
fn toggle_pause(&mut self) {
if self.paused() {
self.unpause();
} else {
self.pause();
}
}
fn volume(&self) -> f32;
fn set_volume(&mut self, new_volume: f32);
fn increase_volume(&mut self) {
self.set_volume(self.volume() + 0.1);
}
fn decrease_volume(&mut self) {
self.set_volume(self.volume() - 0.1);
}
fn refresh_volume(&mut self);
fn muted(&self) -> bool;
fn mute(&mut self);
fn unmute(&mut self);
fn toggle_mute(&mut self) {
if self.muted() {
self.unmute();
} else {
self.mute();
}
}
}
#[derive(Debug, Clone, Copy)]
enum Volume {
Muted(f32),
Unmuted(f32),
}
impl Volume {
fn volume(self) -> f32 {
if let Self::Unmuted(v) = self {
v.min(1.0f32).max(0.0f32)
} else {
0.0f32
}
}
fn set_volume(&mut self, new_volume: f32) {
*self = Self::Unmuted(new_volume.min(1.0f32).max(0.0f32));
}
fn muted(self) -> bool {
match self {
Self::Muted(_) => true,
Self::Unmuted(_) => false,
}
}
fn mute(&mut self) {
let volume = self.volume();
*self = Self::Muted(volume);
}
fn unmute(&mut self) {
let volume = self.volume();
*self = Self::Unmuted(volume);
}
}
impl Default for Volume {
fn default() -> Self {
Self::Unmuted(1.0f32)
}
}
// We can't derive Debug or Clone since the rodio members
// don't implement it
struct AudioDevice {
device: cpal::Device,
// If the stream gets dropped, the device (handle) closes
// so we hold it, but we don't ever use it
_stream: rodio::OutputStream,
handle: rodio::OutputStreamHandle,
sink: rodio::Sink,
volume: Volume,
}
impl AudioDevice {
fn play_m4a_from_path<P>(&mut self, path: P) -> Result<()>
where
P: AsRef<Path>,
{
let decoder: redlux::Decoder<BufReader<File>> = self.decoder_for_path(path)?;
self.play_from_source(decoder)
}
fn decoder_for_path<P: AsRef<Path>>(
&mut self,
path: P,
) -> Result<redlux::Decoder<BufReader<File>>> {
trace!(
"Creating decoder for track at {} for playback",
path.as_ref().to_string_lossy()
);
let file = File::open(path.as_ref()).with_context(|| {
format!(
"Failed opening media file at {}",
path.as_ref().to_string_lossy()
)
})?;
let metadata = file.metadata().with_context(|| {
format!(
"Failed retrieving metadata for media file at {}",
path.as_ref().to_string_lossy()
)
})?;
self.m4a_decoder_for_reader(file, metadata.len())
}
fn m4a_decoder_for_reader<R: Read + Seek + Send + 'static>(
&mut self,
reader: R,
size: u64,
) -> Result<redlux::Decoder<BufReader<R>>> {
let reader = BufReader::new(reader);
redlux::Decoder::new_mpeg4(reader, size)
.with_context(|| "Failed initializing media decoder")
}
fn play_from_source<S>(&mut self, source: S) -> Result<()>
where
S: Source + Send + 'static,
S::Item: Sample,
S::Item: Send,
{
self.reset();
let start_paused = false;
self.sink.append(source.pausable(start_paused));
self.sink.play();
Ok(())
}
}
impl AudioMediator for AudioDevice {
fn reset(&mut self) {
self.sink = rodio::Sink::try_new(&self.handle)
.expect("Failed to initialize audio device for playback");
self.sink.set_volume(self.volume.volume());
}
fn active(&self) -> bool {
!self.sink.empty()
}
fn paused(&self) -> bool {
self.sink.is_paused()
}
fn pause(&mut self) {
self.sink.pause();
}
fn unpause(&mut self) {
self.sink.play()
}
fn volume(&self) -> f32 {
self.volume.volume()
}
fn set_volume(&mut self, new_volume: f32) {
self.volume.set_volume(new_volume);
self.refresh_volume();
}
fn refresh_volume(&mut self) {
self.sink.set_volume(self.volume.volume());
}
fn muted(&self) -> bool {
self.volume.muted()
}
fn mute(&mut self) {
self.volume.mute();
self.refresh_volume();
}
fn unmute(&mut self) {
self.volume.unmute();
self.refresh_volume();
}
}
impl Default for AudioDevice {
fn default() -> Self {
let device = cpal::default_host()
.default_output_device()
.expect("Failed to locate default audio device");
let (_stream, handle) = rodio::OutputStream::try_from_device(&device)
.expect("Failed to initialize audio device for playback");
let sink =
rodio::Sink::try_new(&handle).expect("Failed to initialize audio device for playback");
Self {
device,
_stream,
handle,
sink,
volume: Volume::default(),
}
}
}
impl Clone for AudioDevice {
fn clone(&self) -> Self {
// Since we can't clone the device, we're going to look for the device
// from the output devices list that has the same name as the our
// current one. If none matches, we'll use the default output device.
let device = cpal::default_host()
.devices()
.map(|mut devs| devs.find(|d| d.name().ok() == self.device.name().ok()))
.ok()
.flatten()
.unwrap_or_else(|| {
cpal::default_host()
.default_output_device()
.expect("Failed to locate default audio device")
});
let (_stream, handle) = rodio::OutputStream::try_from_device(&device)
.expect("Failed to initialize audio device for playback");
let sink =
rodio::Sink::try_new(&handle).expect("Failed to initialize audio device for playback");
AudioDevice {
device,
_stream,
handle,
sink,
volume: self.volume,
}
}
}
impl std::fmt::Debug for AudioDevice {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let queued = format!("{} queued", self.sink.len());
let paused = if self.sink.is_paused() {
"paused"
} else {
"not paused"
};
// rodio, around version 0.12, stopped making attributes of the
// underlying audio device available, so we can't report anything
// about it
write!(
f,
"AudioDevice {{ sink: ({}, {}, volume {:.2}), volume: {:?} }}",
queued,
paused,
self.sink.volume(),
self.volume
)
}
}
#[derive(Debug, Clone, Default)]
struct Playing {
audio_device: AudioDevice,
cache_policy: CachePolicy,
last_started: Option<Instant>,
elapsed: Duration,
duration: Duration,
playlist: VecDeque<PlaylistTrack>,
track_cacher: caching::TrackCacher,
}
impl Playing {
#[allow(clippy::field_reassign_with_default)]
fn new(cache_policy: CachePolicy, volume: f32) -> Self {
let mut pl = Self::default();
pl.cache_policy = cache_policy;
pl.set_volume(volume);
pl
}
fn playing(&self) -> Option<PlaylistTrack> {
if self.elapsed() > Duration::default() {
self.playlist.front().cloned()
} else {
None
}
}
fn playlist_len(&self) -> usize {
self.playlist.len() + self.track_cacher.pending_count()
}
fn extend_playlist(&mut self, new_playlist: Vec<PlaylistTrack>) {
self.track_cacher.enqueue(new_playlist);
trace!(
"New playlist length: {}",
self.playlist.len() + self.track_cacher.pending_count()
);
}
fn stop_all(&mut self) {
self.stop();
self.playlist.clear();
self.track_cacher.clear();
}
fn precache_playlist_track(&mut self) {
if let Err(e) = self.track_cacher.update() {
error!("Error while updating track cache: {}", e);
}
self.playlist
.extend(self.track_cacher.get_ready().drain(..));
}
fn evict_playing(&mut self) {
if let Some(track) = self.playlist.pop_front() {
if !self.cache_policy.evict_completed() {
return;
}
if let Some(serde_json::value::Value::String(path)) = track.optional.get("cached") {
let path = PathBuf::from(path);
trace!("Evicting track from cache: {}", path.to_string_lossy());
if let Err(e) = std::fs::remove_file(&path) {
error!(
"Error evicting track {} from cache: {:?}",
path.to_string_lossy(),
e
);
}
}
}
}
}
impl PlaybackMediator for Playing {
fn stopped(&self) -> bool {
if self.active() && self.elapsed() == Duration::default() {
panic!("Application state error: audio device is active, but no track playtime has elapsed.");
}
!self.active()
}
fn stop(&mut self) {
if self.elapsed().as_millis() > 0 {
self.reset();
self.evict_playing();
self.last_started = None;
self.elapsed = Duration::default();
self.duration = Duration::default();
}
}
fn started(&self) -> bool {
if self.active() && self.elapsed() == Duration::default() {
panic!("Application state error: audio device is active, but no track playtime has elapsed.");
}
self.active()
}
fn start(&mut self) {
if self.started() {
trace!("A track is already playing. It needs to be stopped first.");
return;
}
if let Some(track) = self.playlist.front_mut() {
debug!("Starting track: {:?}", &track.song_name);
if let Some(serde_json::value::Value::String(cached)) = track.optional.get("cached") {
trace!("Starting decoding of track {}", cached);
if let Err(e) = self.audio_device.play_m4a_from_path(PathBuf::from(&cached)) {
error!("Error starting track at {}: {:?}", cached, e);
} else {
self.duration = track
.optional
.get("trackLength")
.and_then(|v| v.as_u64())
.map(Duration::from_secs)
.unwrap_or_default();
self.last_started = Some(Instant::now());
trace!("Started track at {}.", cached);
}
} else {
error!("Uncached track in playlist! Evicting...");
self.stop();
}
} else {
trace!("Cannot start track if the playlist is empty.");
}
}
fn elapsed(&self) -> Duration {
let elapsed_since_last_started = self.last_started.map(|i| i.elapsed()).unwrap_or_default();
self.elapsed + elapsed_since_last_started
}
fn duration(&self) -> Duration {
self.duration
}
}
impl AudioMediator for Playing {
fn reset(&mut self) {
self.audio_device.reset();
}
fn active(&self) -> bool {
self.audio_device.active()
}
fn paused(&self) -> bool {
// This returns true when a track has actually been started, but time
// is not elapsing on it.
if self.audio_device.paused() && self.last_started.is_some() {
panic!(
"Application state error: track is paused, but track playtime still increasing."
);
}
self.audio_device.paused()
}
fn pause(&mut self) {
self.elapsed += self
.last_started
.take()
.map(|inst| inst.elapsed())
.unwrap_or_default();
self.audio_device.pause();
}
fn unpause(&mut self) {
if self.elapsed.as_millis() > 0 {
self.last_started.get_or_insert_with(Instant::now);
self.audio_device.unpause();
}
}
fn volume(&self) -> f32 {
self.audio_device.volume()
}
fn set_volume(&mut self, new_volume: f32) {
self.audio_device.set_volume(new_volume)
}
fn refresh_volume(&mut self) {
self.audio_device.refresh_volume();
}
fn muted(&self) -> bool {
self.audio_device.muted()
}
fn mute(&mut self) {
self.audio_device.mute();
}
fn unmute(&mut self) {
self.audio_device.unmute();
}
}
#[derive(Debug, Clone)]
pub(crate) struct Model {
config: Rc<RefCell<Config>>,
session: PandoraSession,
station_list: HashMap<String, Station>,
playing: Playing,
quitting: bool,
dirty: bool,
}
impl Model {
pub(crate) fn new(config: Rc<RefCell<Config>>) -> Self {
let policy = config.borrow_mut().cache_policy();
let volume = config.borrow_mut().volume();
Self {
config: config.clone(),
session: PandoraSession::new(config),
station_list: HashMap::new(),
playing: Playing::new(policy, volume),
quitting: false,
dirty: true,
}
}
pub(crate) fn config(&self) -> Rc<RefCell<Config>> {
self.config.clone()
}
fn refill_playlist(&mut self) {
// If the playing track and at least one more are still
// in the queue, then we don't refill.
let playlist_len = self.playing.playlist_len();
if playlist_len >= 2 {
return;
}
trace!("Playlist length: {}", playlist_len);
if let Some(station) = self.tuned() {
match self.session.get_playlist(&station) {
Ok(playlist) => {
trace!("Extending playlist.");
let playlist: Vec<PlaylistTrack> = playlist
.into_iter()
.filter_map(|pe| pe.get_track())
.collect();
self.playing.extend_playlist(playlist);
self.dirty |= true;
}
Err(e) => error!("Failed while fetching new playlist: {:?}", e),
}
}
}
fn cache_track(&mut self) {
self.playing.precache_playlist_track();
}
pub(crate) fn rate_track(&mut self, rating: Option<bool>) {
if let (Some(track), Some(st_id)) = (self.playing(), self.tuned()) {
let new_rating_value: u32 = if rating.unwrap_or(false) { 1 } else { 0 };
if let Some(rating) = rating {
if let Err(e) = self
.session
.add_feedback(&st_id, &track.track_token, rating)
{
error!("Failed submitting track rating: {:?}", e);
} else {
if let Some(t) = self.playing.playlist.front_mut() {
t.song_rating = new_rating_value;
}
self.dirty |= true;
trace!("Rated track {} with value {}", track.song_name, rating);
}
} else if let Err(e) = self.session.delete_feedback_for_track(&st_id, &track) {
error!("Failed submitting track rating: {:?}", e);
} else {
if let Some(t) = self.playing.playlist.front_mut() {
t.song_rating = new_rating_value;
}
self.dirty |= true;
trace!("Successfully removed track rating.");
}
}
}
pub(crate) fn sleep_track(&mut self) {
if let Err(e) = self
.playing()
.map(|t| self.session.sleep_song(&t.track_token))
.transpose()
{
error!("Failed to sleep track: {:?}", e);
}
self.stop();
}
}
impl StateMediator for Model {
fn disconnected(&self) -> bool {
!self.session.connected()
}
fn disconnect(&mut self) {
// TODO: Evaluate whether session.user_logout() would better suit
self.session.partner_logout();
self.dirty |= true;
}
fn fail_authentication(&mut self) {
let failed_auth =
PartialConfig::default().login(self.config.borrow().login_credentials().as_invalid());
self.dirty |= true;
self.config.borrow_mut().update_from(&failed_auth);
}
fn connected(&self) -> bool {
self.session.connected()
}
fn connect(&mut self) {
if !self.connected() {
trace!("Attempting pandora login...");
if let Err(e) = self.session.user_login() {
if e.downcast_ref::<Error>()
.map(|e| *e == Error::PanharmoniconMissingAuthToken)
.unwrap_or(false)
{
error!("Required authentication token is missing.");
self.fail_authentication();
} else if let Some(e) = e.downcast_ref::<pandora_api::errors::Error>() {
error!("Pandora authentication failure: {:?}", e);
self.fail_authentication();
} else {
error!("Unknown error while logging in: {:?}", e);
self.fail_authentication();
}
} else {
trace!("Successfully logged into Pandora.");
}
self.dirty |= true;
} else {
info!("Connect request ignored. Already connected.");
}
}
fn tuned(&self) -> Option<String> {
self.config.borrow().station_id()
}
fn tune(&mut self, station_id: String) {
if self
.tuned()
.as_ref()
.map(|s| s == &station_id)
.unwrap_or(false)
{
trace!("Requested station is already tuned.");
return;
}
trace!("Updating station on model");
self.config
.borrow_mut()
.update_from(&PartialConfig::default().station(Some(station_id)));
self.dirty |= true;
if !self.connected() {
info!("Cannot start station until connected, but saving station for when connected.");
}
// This will stop the current track and flush the playlist of all queue
// tracks so that later we can fill it with tracks from the new station
if self.started() {
self.playing.stop_all();
self.dirty |= true;
}
}
fn untune(&mut self) {
if self.tuned().is_some() {
self.config
.borrow_mut()
.update_from(&PartialConfig::default().station(None));
self.dirty |= true;
}
// This will stop the current track and flush the playlist of all queue
if self.started() {
self.playing.stop_all();
self.dirty |= true;
}
}
fn ready(&self) -> bool {
self.stopped()
}
fn playing(&self) -> Option<PlaylistTrack> {
self.playing.playing()
}
fn update(&mut self) -> bool {
let mut old_dirty = self.dirty;
// If a track was started, but the audio device is no longer playing it
// force that track out of the playlist
if self.elapsed().as_millis() > 0 && !self.active() {
trace!("Current track finished playing. Evicting from playlist...");
self.playing.stop();
}
if self.connected() {
self.fill_station_list();
if !old_dirty && (old_dirty != self.dirty) {
trace!("fill_station_list dirtied");
old_dirty = self.dirty;
}
self.refill_playlist();
if !old_dirty && (old_dirty != self.dirty) {
trace!("refill_playlist dirtied");
old_dirty = self.dirty;
}
self.cache_track();
if !old_dirty && (old_dirty != self.dirty) {
trace!("cache_track dirtied");
old_dirty = self.dirty;
}
self.start();
if !old_dirty && (old_dirty != self.dirty) {
trace!("start dirtied");
}
} else if self.config.borrow().login_credentials().get().is_some() {
self.disconnect();
self.connect();
if !old_dirty && (old_dirty != self.dirty) {
trace!("connect dirtied");
}
}
let old_dirty = self.dirty;
self.dirty = false;
old_dirty
}
fn quitting(&self) -> bool {
self.quitting
}
fn quit(&mut self) {
trace!("Start quitting the application.");
self.quitting = true;
self.dirty |= true;
}
}
impl StationMediator for Model {
fn fill_station_list(&mut self) {
if !self.station_list.is_empty() {
return;
}
trace!("Filling station list");
self.station_list = self
.session
.get_station_list()
.ok()
.map(|sl| {
sl.stations
.into_iter()
.map(|s| (s.station_id.clone(), s))
.collect()
})
.unwrap_or_default();
self.dirty |= true;
}
fn station_list(&self) -> Vec<(String, String)> {
self.station_list
.values()
.map(|s| (s.station_name.clone(), s.station_id.clone()))
.collect()
}
fn station_count(&self) -> usize {
self.station_list.len()
}
}
impl PlaybackMediator for Model {
fn stopped(&self) -> bool {
self.playing.stopped()
}
fn stop(&mut self) {
if !self.stopped() {
self.playing.stop();
self.dirty |= true;
}
}
fn started(&self) -> bool {
self.playing.started()
}
fn start(&mut self) {
if self.started() {
//trace!("Track already started.");
} else {
trace!("No tracks started yet. Starting next track.");
self.playing.start();
self.dirty |= true;
}
}
fn elapsed(&self) -> Duration {
self.playing.elapsed()
}
fn duration(&self) -> Duration {
self.playing.duration()
}
}
impl AudioMediator for Model {
// TODO: this might require some finesse to get the right
// behavior between either dropping the current-playing track
// or restarting it from the beginning.
fn reset(&mut self) {
self.playing.reset();
self.dirty |= true;
}
fn active(&self) -> bool {
self.playing.active()
}
fn paused(&self) -> bool {
self.playing.paused()
}
fn pause(&mut self) {
if !self.paused() {
self.playing.pause();
self.dirty |= true;
}
}
fn unpause(&mut self) {
if self.paused() {
self.playing.unpause();
self.dirty |= true;
}
}
fn volume(&self) -> f32 {
self.playing.volume()
}
fn set_volume(&mut self, new_volume: f32) {
self.playing.set_volume(new_volume);
self.config
.borrow_mut()
.update_from(&PartialConfig::default().volume(new_volume));
self.dirty |= true;
}
fn refresh_volume(&mut self) {
self.playing.refresh_volume();
self.dirty |= true;
}
fn muted(&self) -> bool
|
fn mute(&mut self) {
if !self.muted() {
self.playing.mute();
self.dirty |= true;
}
}
fn unmute(&mut self) {
if self.muted() {
self.playing.unmute();
self.dirty |= true;
}
}
}
impl Drop for Model {
fn drop(&mut self) {
// If there have been any configuration changes, commit them to disk
trace!("Flushing config file to disk...");
if let Err(e) = self.config.borrow_mut().flush() {
error!("Failed commiting configuration changes to file: {:?}", e);
}
}
}
|
{
self.playing.muted()
}
|
definition.go
|
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build go1.5
package oracle
|
"go/ast"
"go/token"
"go/types"
"github.com/Go-zh/tools/go/loader"
"github.com/Go-zh/tools/oracle/serial"
)
// definition reports the location of the definition of an identifier.
//
// TODO(adonovan): opt: for intra-file references, the parser's
// resolution might be enough; we should start with that.
//
func definition(q *Query) error {
lconf := loader.Config{Build: q.Build}
allowErrors(&lconf)
if _, err := importQueryPackage(q.Pos, &lconf); err != nil {
return err
}
// Load/parse/type-check the program.
lprog, err := lconf.Load()
if err != nil {
return err
}
q.Fset = lprog.Fset
qpos, err := parseQueryPos(lprog, q.Pos, false)
if err != nil {
return err
}
id, _ := qpos.path[0].(*ast.Ident)
if id == nil {
return fmt.Errorf("no identifier here")
}
obj := qpos.info.ObjectOf(id)
if obj == nil {
// Happens for y in "switch y := x.(type)",
// and the package declaration,
// but I think that's all.
return fmt.Errorf("no object for identifier")
}
q.result = &definitionResult{qpos, obj}
return nil
}
type definitionResult struct {
qpos *queryPos
obj types.Object // object it denotes
}
func (r *definitionResult) display(printf printfFunc) {
printf(r.obj, "defined here as %s", r.qpos.objectString(r.obj))
}
func (r *definitionResult) toSerial(res *serial.Result, fset *token.FileSet) {
definition := &serial.Definition{
Desc: r.obj.String(),
}
if pos := r.obj.Pos(); pos != token.NoPos { // Package objects have no Pos()
definition.ObjPos = fset.Position(pos).String()
}
res.Definition = definition
}
|
import (
"fmt"
|
color_transform.rs
|
#[derive(Copy, Clone, Debug)]
pub struct ColorTransform {
pub r_mult: f32,
pub g_mult: f32,
pub b_mult: f32,
pub a_mult: f32,
pub r_add: f32,
pub g_add: f32,
pub b_add: f32,
pub a_add: f32,
}
impl From<swf::ColorTransform> for ColorTransform {
fn from(color_transform: swf::ColorTransform) -> ColorTransform {
ColorTransform {
r_mult: color_transform.r_multiply,
g_mult: color_transform.g_multiply,
b_mult: color_transform.b_multiply,
a_mult: color_transform.a_multiply,
r_add: f32::from(color_transform.r_add) / 255.0,
g_add: f32::from(color_transform.g_add) / 255.0,
b_add: f32::from(color_transform.b_add) / 255.0,
a_add: f32::from(color_transform.a_add) / 255.0,
}
}
}
impl ColorTransform {
#[allow(clippy::float_cmp)]
pub fn is_identity(&self) -> bool {
self.r_mult == 1.0
&& self.g_mult == 1.0
&& self.b_mult == 1.0
&& self.a_mult == 1.0
&& self.r_add == 0.0
&& self.g_add == 0.0
&& self.b_add == 0.0
&& self.a_add == 0.0
}
}
impl std::default::Default for ColorTransform {
fn default() -> ColorTransform {
ColorTransform {
r_mult: 1.0,
b_mult: 1.0,
g_mult: 1.0,
a_mult: 1.0,
r_add: 0.0,
b_add: 0.0,
g_add: 0.0,
a_add: 0.0,
}
}
}
impl std::ops::Mul for ColorTransform {
type Output = Self;
fn mul(self, rhs: Self) -> Self {
ColorTransform {
r_mult: self.r_mult * rhs.r_mult,
g_mult: self.g_mult * rhs.g_mult,
b_mult: self.b_mult * rhs.b_mult,
a_mult: self.a_mult * rhs.a_mult,
r_add: self.r_mult * rhs.r_add + self.r_add,
g_add: self.g_mult * rhs.g_add + self.g_add,
b_add: self.b_mult * rhs.b_add + self.b_add,
a_add: self.a_mult * rhs.a_add + self.a_add,
}
}
}
impl std::ops::MulAssign for ColorTransform {
fn mul_assign(&mut self, rhs: Self) {
*self = ColorTransform {
r_mult: self.r_mult * rhs.r_mult,
g_mult: self.g_mult * rhs.g_mult,
b_mult: self.b_mult * rhs.b_mult,
a_mult: self.a_mult * rhs.a_mult,
|
a_add: self.a_mult * rhs.a_add + self.a_add,
}
}
}
|
r_add: self.r_mult * rhs.r_add + self.r_add,
g_add: self.g_mult * rhs.b_add + self.g_add,
b_add: self.b_mult * rhs.g_add + self.b_add,
|
jumptable.py
|
import logging
from collections import defaultdict
import pyvex
from ....errors import AngrError, SimError
from ....blade import Blade
from ....annocfg import AnnotatedCFG
from .... import sim_options as o
from .... import BP, BP_BEFORE
from ....surveyors import Slicecutor
from .resolver import IndirectJumpResolver
l = logging.getLogger("angr.analyses.cfg.indirect_jump_resolvers.jumptable")
class UninitReadMeta(object):
uninit_read_base = 0xc000000
class JumpTableResolver(IndirectJumpResolver):
"""
A generic jump table resolver.
This is a fast jump table resolution. For performance concerns, we made the following assumptions:
- The final jump target comes from the memory.
- The final jump target must be directly read out of the memory, without any further modification or altering.
"""
def __init__(self, project):
super(JumpTableResolver, self).__init__(project, timeless=False)
self._bss_regions = None
# the maximum number of resolved targets. Will be initialized from CFG.
self._max_targets = None
self._find_bss_region()
def filter(self, cfg, addr, func_addr, block, jumpkind):
# TODO:
if jumpkind != "Ijk_Boring":
# Currently we only support boring ones
return False
return True
def resolve(self, cfg, addr, func_addr, block, jumpkind):
"""
Resolves jump tables.
:param cfg: A CFG instance.
:param int addr: IRSB address.
:param int func_addr: The function address.
:param pyvex.IRSB block: The IRSB.
:return: A bool indicating whether the indirect jump is resolved successfully, and a list of resolved targets
:rtype: tuple
"""
project = self.project # short-hand
self._max_targets = cfg._indirect_jump_target_limit
# Perform a backward slicing from the jump target
b = Blade(cfg.graph, addr, -1,
cfg=cfg, project=project,
ignore_sp=False, ignore_bp=False,
max_level=3, base_state=self.base_state)
stmt_loc = (addr, 'default')
if stmt_loc not in b.slice:
return False, None
load_stmt_loc, load_stmt = None, None
stmts_to_remove = [stmt_loc]
while True:
preds = list(b.slice.predecessors(stmt_loc))
if len(preds) != 1:
return False, None
block_addr, stmt_idx = stmt_loc = preds[0]
block = project.factory.block(block_addr, backup_state=self.base_state).vex
stmt = block.statements[stmt_idx]
if isinstance(stmt, (pyvex.IRStmt.WrTmp, pyvex.IRStmt.Put)):
if isinstance(stmt.data, (pyvex.IRExpr.Get, pyvex.IRExpr.RdTmp)):
# data transferring
stmts_to_remove.append(stmt_loc)
continue
elif isinstance(stmt.data, pyvex.IRExpr.ITE):
# data transferring
# t16 = if (t43) ILGop_Ident32(LDle(t29)) else 0x0000c844
# > t44 = ITE(t43,t16,0x0000c844)
stmts_to_remove.append(stmt_loc)
continue
elif isinstance(stmt.data, pyvex.IRExpr.Load):
# Got it!
load_stmt, load_stmt_loc = stmt, stmt_loc
stmts_to_remove.append(stmt_loc)
elif isinstance(stmt, pyvex.IRStmt.LoadG):
# Got it!
#
# this is how an ARM jump table is translated to VEX
# > t16 = if (t43) ILGop_Ident32(LDle(t29)) else 0x0000c844
load_stmt, load_stmt_loc = stmt, stmt_loc
stmts_to_remove.append(stmt_loc)
break
if load_stmt_loc is None:
# the load statement is not found
return False, None
# skip all statements before the load statement
b.slice.remove_nodes_from(stmts_to_remove)
# Debugging output
if l.level == logging.DEBUG:
self._dbg_repr_slice(b)
# Get all sources
sources = [ n_ for n_ in b.slice.nodes() if b.slice.in_degree(n_) == 0 ]
# Create the annotated CFG
annotatedcfg = AnnotatedCFG(project, None, detect_loops=False)
annotatedcfg.from_digraph(b.slice)
# pylint: disable=too-many-nested-blocks
for src_irsb, _ in sources:
# Use slicecutor to execute each one, and get the address
# We simply give up if any exception occurs on the way
start_state = self._initial_state(src_irsb)
# any read from an uninitialized segment should be unconstrained
if self._bss_regions:
bss_memory_read_bp = BP(when=BP_BEFORE, enabled=True, action=self._bss_memory_read_hook)
start_state.inspect.add_breakpoint('mem_read', bss_memory_read_bp)
start_state.regs.bp = start_state.arch.initial_sp + 0x2000
init_registers_on_demand_bp = BP(when=BP_BEFORE, enabled=True, action=self._init_registers_on_demand)
start_state.inspect.add_breakpoint('mem_read', init_registers_on_demand_bp)
# Create the slicecutor
slicecutor = Slicecutor(project, annotatedcfg, start=start_state, targets=(load_stmt_loc[0],),
force_taking_exit=True
)
# Run it!
try:
slicecutor.run()
except KeyError as ex:
# This is because the program slice is incomplete.
# Blade will support more IRExprs and IRStmts
l.debug("KeyError occurred due to incomplete program slice.", exc_info=ex)
continue
# Get the jumping targets
for r in slicecutor.reached_targets:
try:
succ = project.factory.successors(r)
except (AngrError, SimError):
# oops there are errors
l.warning('Cannot get jump successor states from a path that has reached the target. Skip it.')
continue
all_states = succ.flat_successors + succ.unconstrained_successors
if not all_states:
l.warning("Slicecutor failed to execute the program slice. No output state is available.")
continue
state = all_states[0] # Just take the first state
# Parse the memory load statement
jump_addr = self._parse_load_statement(load_stmt, state)
if jump_addr is None:
continue
all_targets = [ ]
total_cases = jump_addr._model_vsa.cardinality
if total_cases > self._max_targets:
# We resolved too many targets for this indirect jump. Something might have gone wrong.
l.debug("%d targets are resolved for the indirect jump at %#x. It may not be a jump table",
total_cases, addr)
return False, None
# Or alternatively, we can ask user, which is meh...
#
# jump_base_addr = int(raw_input("please give me the jump base addr: "), 16)
# total_cases = int(raw_input("please give me the total cases: "))
# jump_target = state.se.SI(bits=64, lower_bound=jump_base_addr, upper_bound=jump_base_addr +
# (total_cases - 1) * 8, stride=8)
jump_table = [ ]
min_jump_target = state.se.min(jump_addr)
max_jump_target = state.se.max(jump_addr)
|
if not project.loader.find_segment_containing(min_jump_target) or \
not project.loader.find_segment_containing(max_jump_target):
l.debug("Jump table %#x might have jump targets outside mapped memory regions. "
"Continue to resolve it from the next data source.", addr)
continue
for idx, a in enumerate(state.se.eval_upto(jump_addr, total_cases)):
if idx % 100 == 0 and idx != 0:
l.debug("%d targets have been resolved for the indirect jump at %#x...", idx, addr)
target = cfg._fast_memory_load_pointer(a)
all_targets.append(target)
jump_table.append(target)
l.info("Resolved %d targets from %#x.", len(all_targets), addr)
# write to the IndirectJump object in CFG
ij = cfg.indirect_jumps[addr]
if total_cases > 1:
# It can be considered a jump table only if there are more than one jump target
ij.jumptable = True
ij.jumptable_addr = state.se.min(jump_addr)
ij.resolved_targets = set(jump_table)
ij.jumptable_entries = jump_table
else:
ij.jumptable = False
ij.resolved_targets = set(jump_table)
return True, all_targets
return False, None
#
# Private methods
#
def _find_bss_region(self):
self._bss_regions = [ ]
# TODO: support other sections other than '.bss'.
# TODO: this is very hackish. fix it after the chaos.
for section in self.project.loader.main_object.sections:
if section.name == '.bss':
self._bss_regions.append((section.vaddr, section.memsize))
break
def _bss_memory_read_hook(self, state):
if not self._bss_regions:
return
read_addr = state.inspect.mem_read_address
read_length = state.inspect.mem_read_length
if not isinstance(read_addr, (int, long)) and read_addr.symbolic:
# don't touch it
return
concrete_read_addr = state.se.eval(read_addr)
concrete_read_length = state.se.eval(read_length)
for start, size in self._bss_regions:
if start <= concrete_read_addr < start + size:
# this is a read from the .bss section
break
else:
return
if not state.memory.was_written_to(concrete_read_addr):
# it was never written to before. we overwrite it with unconstrained bytes
for i in xrange(0, concrete_read_length, self.project.arch.bits / 8):
state.memory.store(concrete_read_addr + i, state.se.Unconstrained('unconstrained', self.project.arch.bits))
# job done :-)
@staticmethod
def _init_registers_on_demand(state):
# for uninitialized read using a register as the source address, we replace them in memory on demand
read_addr = state.inspect.mem_read_address
cond = state.inspect.mem_read_condition
if not isinstance(read_addr, (int, long)) and read_addr.uninitialized and cond is None:
read_length = state.inspect.mem_read_length
if not isinstance(read_length, (int, long)):
read_length = read_length._model_vsa.upper_bound
if read_length > 16:
return
new_read_addr = state.se.BVV(UninitReadMeta.uninit_read_base, state.arch.bits)
UninitReadMeta.uninit_read_base += read_length
# replace the expression in registers
state.registers.replace_all(read_addr, new_read_addr)
state.inspect.mem_read_address = new_read_addr
# job done :-)
def _dbg_repr_slice(self, blade):
stmts = defaultdict(set)
for addr, stmt_idx in sorted(list(blade.slice.nodes())):
stmts[addr].add(stmt_idx)
for addr in sorted(stmts.keys()):
stmt_ids = stmts[addr]
irsb = self.project.factory.block(addr, backup_state=self.base_state).vex
print " ####"
print " #### Block %#x" % addr
print " ####"
for i, stmt in enumerate(irsb.statements):
taken = i in stmt_ids
s = "%s %x:%02d | " % ("+" if taken else " ", addr, i)
s += "%s " % stmt.__str__(arch=self.project.arch, tyenv=irsb.tyenv)
if taken:
s += "IN: %d" % blade.slice.in_degree((addr, i))
print s
# the default exit
default_exit_taken = 'default' in stmt_ids
s = "%s %x:default | PUT(%s) = %s; %s" % ("+" if default_exit_taken else " ", addr, irsb.offsIP, irsb.next,
irsb.jumpkind
)
print s
def _initial_state(self, src_irsb):
state = self.project.factory.blank_state(
addr=src_irsb,
mode='static',
add_options={
o.DO_RET_EMULATION,
o.TRUE_RET_EMULATION_GUARD,
o.AVOID_MULTIVALUED_READS,
},
remove_options={
o.CGC_ZERO_FILL_UNCONSTRAINED_MEMORY,
o.UNINITIALIZED_ACCESS_AWARENESS,
} | o.refs
)
return state
@staticmethod
def _parse_load_statement(load_stmt, state):
"""
Parse a memory load VEX statement and get the jump target addresses.
:param load_stmt: The VEX statement for loading the jump target addresses.
:param state: The SimState instance (in static mode).
:return: A tuple of an abstract value (or a concrete value) representing the jump target addresses,
and a set of extra concrete targets. Return (None, None) if we fail to parse the statement.
"""
# The jump table address is stored in a tmp. In this case, we find the jump-target loading tmp.
load_addr_tmp = None
if isinstance(load_stmt, pyvex.IRStmt.WrTmp):
load_addr_tmp = load_stmt.data.addr.tmp
elif isinstance(load_stmt, pyvex.IRStmt.LoadG):
if type(load_stmt.addr) is pyvex.IRExpr.RdTmp:
load_addr_tmp = load_stmt.addr.tmp
elif type(load_stmt.addr) is pyvex.IRExpr.Const:
# It's directly loading from a constant address
# e.g.,
# 4352c SUB R1, R11, #0x1000
# 43530 LDRHI R3, =loc_45450
# ...
# 43540 MOV PC, R3
#
# It's not a jump table, but we resolve it anyway
# TODO: We should develop an ARM-specific indirect jump resolver in this case
# Note that this block has two branches: One goes to 45450, the other one goes to whatever the original
# value of R3 is. Some intensive data-flow analysis is required in this case.
jump_target_addr = load_stmt.addr.con.value
return state.se.BVV(jump_target_addr, state.arch.bits)
else:
raise TypeError("Unsupported address loading statement type %s." % type(load_stmt))
if load_addr_tmp not in state.scratch.temps:
# the tmp variable is not there... umm...
return None
jump_addr = state.scratch.temps[load_addr_tmp]
if isinstance(load_stmt, pyvex.IRStmt.LoadG):
# LoadG comes with a guard. We should apply this guard to the load expression
guard_tmp = load_stmt.guard.tmp
guard = state.scratch.temps[guard_tmp] != 0
jump_addr = state.memory._apply_condition_to_symbolic_addr(jump_addr, guard)
return jump_addr
|
# Both the min jump target and the max jump target should be within a mapped memory region
# i.e., we shouldn't be jumping to the stack or somewhere unmapped
|
txd.rs
|
#[doc = r" Value read from the register"]
pub struct R {
bits: u32,
}
#[doc = r" Value to write to the register"]
pub struct W {
bits: u32,
}
impl super::TXD {
#[doc = r" Modifies the contents of the register"]
#[inline]
pub fn modify<F>(&self, f: F)
where
for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
{
let bits = self.register.get();
let r = R { bits: bits };
let mut w = W { bits: bits };
f(&r, &mut w);
self.register.set(w.bits);
}
#[doc = r" Reads the contents of the register"]
#[inline]
pub fn read(&self) -> R {
R {
bits: self.register.get(),
}
}
#[doc = r" Writes to the register"]
#[inline]
pub fn write<F>(&self, f: F)
where
F: FnOnce(&mut W) -> &mut W,
{
let mut w = W::reset_value();
f(&mut w);
|
}
#[doc = r" Writes the reset value to the register"]
#[inline]
pub fn reset(&self) {
self.write(|w| w)
}
}
#[doc = r" Value of the field"]
pub struct TXDR {
bits: u8,
}
impl TXDR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = r" Proxy"]
pub struct _TXDW<'a> {
w: &'a mut W,
}
impl<'a> _TXDW<'a> {
#[doc = r" Writes raw bits to the field"]
#[inline]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
const MASK: u8 = 255;
const OFFSET: u8 = 0;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
impl R {
#[doc = r" Value of the register as raw bits"]
#[inline]
pub fn bits(&self) -> u32 {
self.bits
}
#[doc = "Bits 0:7 - TXD register"]
#[inline]
pub fn txd(&self) -> TXDR {
let bits = {
const MASK: u8 = 255;
const OFFSET: u8 = 0;
((self.bits >> OFFSET) & MASK as u32) as u8
};
TXDR { bits }
}
}
impl W {
#[doc = r" Reset value of the register"]
#[inline]
pub fn reset_value() -> W {
W { bits: 0 }
}
#[doc = r" Writes raw bits to the register"]
#[inline]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.bits = bits;
self
}
#[doc = "Bits 0:7 - TXD register"]
#[inline]
pub fn txd(&mut self) -> _TXDW {
_TXDW { w: self }
}
}
|
self.register.set(w.bits);
|
time.rs
|
pub struct CurrentTime(pub f64);
|
#[derive(Default)]
|
|
sleep.rs
|
//
// Copyright (c) 2018, The MesaLock Linux Project Contributors
// All rights reserved.
//
// This work is licensed under the terms of the BSD 3-Clause License.
// For a copy, see the LICENSE file.
//
use assert_cmd::prelude::*;
use std::process::Command;
use std::time::{Duration, Instant};
const NAME: &str = "sleep";
const SLEEP_TIME: f32 = 5.75;
const DIFF: f32 = 1.5;
#[test]
fn test_one_param()
|
#[test]
fn test_many_params() {
let now = Instant::now();
new_cmd!()
.args(&[(SLEEP_TIME / 4.0).to_string(), (SLEEP_TIME / 2.0).to_string(), (SLEEP_TIME / 8.0).to_string(), (SLEEP_TIME / 8.0).to_string()])
.assert()
.success();
validate_duration(now.elapsed(), SLEEP_TIME);
}
fn validate_duration(duration: Duration, sleep_time: f32) {
let time = duration.as_secs() as f32 + duration.subsec_nanos() as f32 / 1_000_000_000.0;
if (time - sleep_time).abs() > DIFF {
panic!("slept for too long ({} secs instead of {} secs)", time, sleep_time);
}
}
|
{
let now = Instant::now();
new_cmd!()
.args(&[SLEEP_TIME.to_string()])
.assert()
.success();
validate_duration(now.elapsed(), SLEEP_TIME);
}
|
second_run_gear_images_without_cache.py
|
import sys
# package need to be installed, pip install docker
import docker
import time
import yaml
import os
import random
import subprocess
import signal
import urllib2
import shutil
import xlwt
# package need to be installed, apt-get install python-memcache
import memcache
auto = False
private_registry = "202.114.10.146:9999/"
suffix = "-gearmd"
apppath = ""
# run paraments
hostPort = 11211
localVolume = ""
pwd = os.path.split(os.path.realpath(__file__))[0]
runEnvironment = []
runPorts = {"11211/tcp": hostPort, }
runVolumes = {}
runWorking_dir = ""
runCommand = "memcached -p 11211"
waitline = ""
# result
result = [["tag", "finishTime", "local data", "pull data"], ]
class Runner:
def __init__(self, images):
self.images_to_pull = images
def check(self):
# detect whether the file exists, if true, delete it
if os.path.exists("./images_run.txt"):
os.remove("./images_run.txt")
def run(self):
self.check()
client = docker.from_env()
# if don't give a tag, then all image under this registry will be pulled
repos = self.images_to_pull[0]["repo"]
for repo in repos:
tags = self.images_to_pull[1][repo]
for tag in tags:
private_repo = private_registry + repo + suffix + ":" + tag
if localVolume != "":
if os.path.exists(localVolume) == False:
os.makedirs(localVolume)
print "start running: ", private_repo
# create a random name
runName = '%d' % (random.randint(1,100000000))
# get present time
startTime = time.time()
# get present net data
cnetdata = get_net_data()
# run images
container = client.containers.create(image=private_repo, environment=runEnvironment,
ports=runPorts, volumes=runVolumes, working_dir=runWorking_dir,
command=runCommand, name=runName, detach=True)
container.start()
while True:
if time.time() - startTime > 600:
break
try:
conn = memcache.Client(["localhost:11211"])
ret = conn.set("game", "Three kingdoms")
if ret != True:
continue
print "successfully insert!"
ret = conn.replace("game", "dota2")
if ret != True:
continue
print "successfully update!"
print conn.get("game")
ret = conn.delete("game")
if ret != True:
continue
print "successfully delete!"
break
except:
time.sleep(0.1) # wait 100ms
pass
# print run time
finishTime = time.time() - startTime
print "finished in " , finishTime, "s"
container_path = os.path.join("/var/lib/gear/private", private_repo)
local_data = subprocess.check_output(['du','-sh', container_path]).split()[0].decode('utf-8')
print "local data: ", local_data
pull_data = get_net_data() - cnetdata
print "pull data: ", pull_data
try:
container.kill()
except:
print "kill fail!"
pass
container.remove(force=True)
# delete files under /var/lib/gear/public/
shutil.rmtree('/var/lib/gear/public/')
os.mkdir('/var/lib/gear/public/')
print "empty cache! \n"
# record the image and its Running time
result.append([tag, finishTime, local_data, pull_data])
if auto != True:
raw_input("Next?")
else:
time.sleep(5)
if localVolume != "":
shutil.rmtree(localVolume)
class Generator:
def __init__(self, profilePath=""):
self.profilePath = profilePath
def generateFromProfile(self):
if self.profilePath == "":
print "Error: profile path is null"
with open(self.profilePath, 'r') as f:
self.images = yaml.load(f)
return self.images
def
|
():
netCard = "/proc/net/dev"
fd = open(netCard, "r")
for line in fd.readlines():
if line.find("enp0s3") >= 0:
field = line.split()
data = float(field[1]) / 1024.0 / 1024.0
fd.close()
return data
if __name__ == "__main__":
if len(sys.argv) == 2:
auto = True
generator = Generator(os.path.split(os.path.realpath(__file__))[0]+"/image_versions.yaml")
images = generator.generateFromProfile()
runner = Runner(images)
runner.run()
# create a workbook sheet
workbook = xlwt.Workbook()
sheet = workbook.add_sheet("run_time")
for row in range(len(result)):
for column in range(len(result[row])):
sheet.write(row, column, result[row][column])
workbook.save(os.path.split(os.path.realpath(__file__))[0]+"/second_run_without_cache.xls")
|
get_net_data
|
accordionTitleBehavior.ts
|
import { keyboardKey, SpacebarKey } from '@fluentui/keyboard-key';
import { Accessibility } from '../../types';
/**
* @description
* Adds accessibility attributed to implement the Accordion design pattern.
* Adds 'aria-disabled' to the 'content' slot with a value based on disabled, active and canBeCollapsed props.
* Adds role='heading' and aria-level='3' if the element type is not a header.
*
* @specification
|
* Triggers 'performClick' action with 'Enter' or 'Spacebar' on 'content'.
*/
export const accordionTitleBehavior: Accessibility<AccordionTitleBehaviorProps> = props => {
const isHeading = /(h\d{1})$/.test(props.as);
const forcedOpen = props.active && !props.canBeCollapsed;
return {
attributes: {
root: {
role: isHeading ? undefined : 'heading',
'aria-level': isHeading ? undefined : 3,
},
content: {
'aria-expanded': !!props.active,
'aria-disabled': !!(forcedOpen || props.disabled),
'aria-controls': props.accordionContentId,
role: 'button',
tabIndex: 0,
},
},
keyActions: {
content: {
performClick: {
keyCombinations: [{ keyCode: keyboardKey.Enter }, { keyCode: SpacebarKey }],
},
},
},
};
};
export type AccordionTitleBehaviorProps = {
/** Element type. */
as?: string;
/** Whether or not the title is in the open state. */
active?: boolean;
/** If at least one panel needs to stay active and this title does not correspond to the last active one. */
canBeCollapsed?: boolean;
/** An accordion title can show it is currently unable to be interacted with. */
disabled?: boolean;
/** Id of the content it owns. */
accordionContentId?: string;
};
|
* Adds attribute 'role=button' to 'content' slot.
* Adds attribute 'tabIndex=0' to 'content' slot.
* Adds attribute 'aria-expanded=true' based on the property 'active' to 'content' slot.
* Adds attribute 'aria-controls=content-id' based on the property 'accordionContentId' to 'content' slot.
|
ArtworkSidebarCommercial.tsx
|
import { Intent, ContextModule } from "@artsy/cohesion"
import {
Box,
Button,
Flex,
FlexProps,
Radio,
RadioGroup,
Sans,
Separator,
Serif,
Spacer,
} from "@artsy/palette"
import { ArtworkSidebarCommercial_artwork } from "__generated__/ArtworkSidebarCommercial_artwork.graphql"
import { ArtworkSidebarCommercialOfferOrderMutation } from "__generated__/ArtworkSidebarCommercialOfferOrderMutation.graphql"
import { ArtworkSidebarCommercialOrderMutation } from "__generated__/ArtworkSidebarCommercialOrderMutation.graphql"
import { Mediator, SystemContext } from "Artsy"
import { track } from "Artsy/Analytics"
import * as Schema from "Artsy/Analytics/Schema"
import { ModalType } from "Components/Authentication/Types"
import { ErrorModal } from "Components/Modal/ErrorModal"
import currency from "currency.js"
import { Router } from "found"
import React, { FC, useContext } from "react"
import {
commitMutation,
createFragmentContainer,
graphql,
RelayProp,
} from "react-relay"
import { ErrorWithMetadata } from "Utils/errors"
import { get } from "Utils/get"
import createLogger from "Utils/logger"
import { openAuthModal } from "Utils/openAuthModal"
import { ArtworkSidebarSizeInfoFragmentContainer as SizeInfo } from "./ArtworkSidebarSizeInfo"
type EditionSet = ArtworkSidebarCommercial_artwork["edition_sets"][0]
export interface ArtworkSidebarCommercialContainerProps
extends ArtworkSidebarCommercialProps {
mediator: Mediator
router?: Router
user: User
EXPERIMENTAL_APP_SHELL?: boolean
}
export interface ArtworkSidebarCommercialContainerState {
isCommittingCreateOrderMutation: boolean
isCommittingCreateOfferOrderMutation: boolean
isErrorModalOpen: boolean
selectedEditionSet: EditionSet
}
const Row: React.SFC<FlexProps> = ({ children, ...others }) => (
<Flex justifyContent="left" {...others}>
{children}
</Flex>
)
const logger = createLogger(
"Artwork/Components/ArtworkSidebar/ArtworkSidebarCommercial.tsx"
)
@track()
export class ArtworkSidebarCommercialContainer extends React.Component<
ArtworkSidebarCommercialContainerProps,
ArtworkSidebarCommercialContainerState
> {
state: ArtworkSidebarCommercialContainerState = {
isCommittingCreateOrderMutation: false,
isCommittingCreateOfferOrderMutation: false,
isErrorModalOpen: false,
selectedEditionSet: this.firstAvailableEcommerceEditionSet(),
}
firstAvailableEcommerceEditionSet(): EditionSet {
const editionSets = this.props.artwork.edition_sets
return editionSets.find(editionSet => {
return editionSet.is_acquireable || editionSet.is_offerable
})
}
renderSaleMessage(saleMessage: string) {
return (
<Serif size="5t" weight="semibold" data-test="SaleMessage">
{saleMessage}
</Serif>
)
}
renderEditionSet(editionSet: EditionSet, includeSelectOption: boolean) {
const editionEcommerceAvailable =
editionSet.is_acquireable || editionSet.is_offerable
const editionFragment = (
<>
<SizeInfo piece={editionSet} />
<Serif ml="auto" size="2" data-test="SaleMessage">
{editionSet.sale_message}
</Serif>
</>
)
if (includeSelectOption) {
return (
<Row>
<Radio
mr="1"
onSelect={e => {
this.setState({ selectedEditionSet: editionSet })
|
/>
</Row>
)
} else {
return <Row>{editionFragment}</Row>
}
}
renderEditionSets(includeSelectOption: boolean) {
const editionSets = this.props.artwork.edition_sets
const editionSetsFragment = editionSets.map((editionSet, index) => {
return (
<React.Fragment key={editionSet.id}>
<Box py={3}>
{this.renderEditionSet(editionSet, includeSelectOption)}
</Box>
{index !== editionSets.length - 1 && <Separator />}
</React.Fragment>
)
})
return <RadioGroup>{editionSetsFragment}</RadioGroup>
}
onMutationError = (error: ErrorWithMetadata) => {
logger.error(error)
this.setState({
isCommittingCreateOrderMutation: false,
isErrorModalOpen: true,
})
}
onCloseModal = () => {
this.setState({ isErrorModalOpen: false })
}
@track<ArtworkSidebarCommercialContainerProps>(props => ({
context_module: Schema.ContextModule.Sidebar,
action_type: Schema.ActionType.ClickedContactGallery,
subject: Schema.Subject.ContactGallery,
artwork_id: props.artwork.internalID,
artwork_slug: props.artwork.slug,
}))
handleInquiry() {
get(this.props, props => props.mediator.trigger) &&
this.props.mediator.trigger("launchInquiryFlow", {
artworkId: this.props.artwork.internalID,
})
}
@track<ArtworkSidebarCommercialContainerProps>((props, state, args) => ({
action_type: Schema.ActionType.ClickedBuyNow,
flow: Schema.Flow.BuyNow,
type: Schema.Type.Button,
artwork_id: props.artwork.internalID,
artwork_slug: props.artwork.slug,
products: [
{
product_id: props.artwork.internalID,
quantity: 1,
price: currency(props.artwork.listPrice.display).value,
},
],
}))
handleCreateOrder() {
const { user, mediator } = this.props
if (user && user.id) {
this.setState({ isCommittingCreateOrderMutation: true }, () => {
if (get(this.props, props => props.relay.environment)) {
commitMutation<ArtworkSidebarCommercialOrderMutation>(
this.props.relay.environment,
{
// TODO: Inputs to the mutation might have changed case of the keys!
mutation: graphql`
mutation ArtworkSidebarCommercialOrderMutation(
$input: CommerceCreateOrderWithArtworkInput!
) {
commerceCreateOrderWithArtwork(input: $input) {
orderOrError {
... on CommerceOrderWithMutationSuccess {
__typename
order {
internalID
mode
}
}
... on CommerceOrderWithMutationFailure {
error {
type
code
data
}
}
}
}
}
`,
variables: {
input: {
artworkId: this.props.artwork.internalID,
editionSetId: get(
this.state,
state => state.selectedEditionSet.internalID
),
},
},
onCompleted: data => {
this.setState(
{ isCommittingCreateOrderMutation: false },
() => {
const {
commerceCreateOrderWithArtwork: { orderOrError },
} = data
if (orderOrError.error) {
this.onMutationError(
new ErrorWithMetadata(
orderOrError.error.code,
orderOrError.error
)
)
} else {
const url = `/orders/${orderOrError.order.internalID}`
// FIXME: Remove once A/B test completes
if (this.props.EXPERIMENTAL_APP_SHELL) {
this.props.router.push(url)
} else {
window.location.assign(url)
}
}
}
)
},
onError: this.onMutationError,
}
)
}
})
} else {
openAuthModal(mediator, {
mode: ModalType.signup,
redirectTo: location.href,
contextModule: ContextModule.artworkSidebar,
intent: Intent.buyNow,
})
}
}
@track<ArtworkSidebarCommercialContainerProps>((props, state, args) => ({
action_type: Schema.ActionType.ClickedMakeOffer,
flow: Schema.Flow.MakeOffer,
type: Schema.Type.Button,
artwork_id: props.artwork.internalID,
artwork_slug: props.artwork.slug,
}))
handleCreateOfferOrder() {
const { user, mediator } = this.props
if (user && user.id) {
this.setState({ isCommittingCreateOfferOrderMutation: true }, () => {
if (get(this.props, props => props.relay.environment)) {
commitMutation<ArtworkSidebarCommercialOfferOrderMutation>(
this.props.relay.environment,
{
// TODO: Inputs to the mutation might have changed case of the keys!
mutation: graphql`
mutation ArtworkSidebarCommercialOfferOrderMutation(
$input: CommerceCreateOfferOrderWithArtworkInput!
) {
commerceCreateOfferOrderWithArtwork(input: $input) {
orderOrError {
... on CommerceOrderWithMutationSuccess {
__typename
order {
internalID
mode
}
}
... on CommerceOrderWithMutationFailure {
error {
type
code
data
}
}
}
}
}
`,
variables: {
input: {
artworkId: this.props.artwork.internalID,
editionSetId: get(
this.state,
state => state.selectedEditionSet.internalID
),
},
},
onCompleted: data => {
this.setState(
{ isCommittingCreateOfferOrderMutation: false },
() => {
const {
commerceCreateOfferOrderWithArtwork: { orderOrError },
} = data
if (orderOrError.error) {
this.onMutationError(
new ErrorWithMetadata(
orderOrError.error.code,
orderOrError.error
)
)
} else {
const url = `/orders/${orderOrError.order.internalID}/offer`
// FIXME: Remove once A/B test completes
if (this.props.EXPERIMENTAL_APP_SHELL) {
this.props.router.push(url)
} else {
window.location.assign(url)
}
}
}
)
},
onError: this.onMutationError,
}
)
}
})
} else {
openAuthModal(mediator, {
mode: ModalType.signup,
redirectTo: location.href,
contextModule: ContextModule.artworkSidebar,
intent: Intent.makeOffer,
})
}
}
render() {
const { artwork } = this.props
const {
isCommittingCreateOrderMutation,
isCommittingCreateOfferOrderMutation,
selectedEditionSet,
} = this.state
const artworkEcommerceAvailable =
artwork.is_acquireable || artwork.is_offerable
if (!artwork.sale_message && !artwork.is_inquireable) {
return <Separator />
}
return (
<Box textAlign="left">
{artwork.sale_message && <Separator />}
{artwork.edition_sets.length < 2 ? (
artwork.sale_message && (
<>
<Spacer mb={3} />
{this.renderSaleMessage(artwork.sale_message)}
</>
)
) : (
<>
{this.renderEditionSets(artworkEcommerceAvailable)}
{selectedEditionSet && (
<>
<Separator mb={3} />
{this.renderSaleMessage(selectedEditionSet.sale_message)}
</>
)}
</>
)}
{artworkEcommerceAvailable &&
(artwork.shippingOrigin || artwork.shippingInfo) && <Spacer mt={1} />}
{artworkEcommerceAvailable && artwork.shippingOrigin && (
<Sans size="2" color="black60">
Ships from {artwork.shippingOrigin}
</Sans>
)}
{artworkEcommerceAvailable && artwork.shippingInfo && (
<Sans size="2" color="black60">
{artwork.shippingInfo}
</Sans>
)}
{artworkEcommerceAvailable && artwork.priceIncludesTaxDisplay && (
<Sans size="2" color="black60">
{artwork.priceIncludesTaxDisplay}
</Sans>
)}
{artwork.is_inquireable ||
artwork.is_acquireable ||
artwork.is_offerable ? (
artwork.sale_message && <Spacer mb={3} />
) : (
<Separator mb={3} mt={3} />
)}
{artwork.is_acquireable && (
<Button
width="100%"
size="large"
loading={isCommittingCreateOrderMutation}
onClick={this.handleCreateOrder.bind(this)}
>
Buy now
</Button>
)}
{artwork.is_offerable && (
<>
<Spacer mb={2} />
<Button
variant={
artwork.is_acquireable ? "secondaryOutline" : "primaryBlack"
}
width="100%"
size="large"
loading={isCommittingCreateOfferOrderMutation}
onClick={this.handleCreateOfferOrder.bind(this)}
>
Make offer
</Button>
</>
)}
{artwork.is_inquireable &&
!artwork.is_acquireable &&
!artwork.is_offerable && (
<Button
width="100%"
size="large"
onClick={this.handleInquiry.bind(this)}
>
Contact gallery
</Button>
)}
<ErrorModal
onClose={this.onCloseModal}
show={this.state.isErrorModalOpen}
contactEmail="[email protected]"
/>
</Box>
)
}
}
interface ArtworkSidebarCommercialProps {
artwork: ArtworkSidebarCommercial_artwork
relay?: RelayProp
}
export const ArtworkSidebarCommercial: FC<ArtworkSidebarCommercialProps> = props => {
const { mediator, router, user, EXPERIMENTAL_APP_SHELL } = useContext(
SystemContext
)
return (
<ArtworkSidebarCommercialContainer
{...props}
mediator={mediator}
router={router}
user={user}
EXPERIMENTAL_APP_SHELL={EXPERIMENTAL_APP_SHELL}
/>
)
}
export const ArtworkSidebarCommercialFragmentContainer = createFragmentContainer(
ArtworkSidebarCommercial,
{
artwork: graphql`
fragment ArtworkSidebarCommercial_artwork on Artwork {
slug
internalID
is_for_sale: isForSale
is_acquireable: isAcquireable
is_inquireable: isInquireable
is_offerable: isOfferable
listPrice {
... on PriceRange {
display
}
... on Money {
display
}
}
priceIncludesTaxDisplay
sale_message: saleMessage
shippingInfo
shippingOrigin
edition_sets: editionSets {
internalID
id
is_acquireable: isAcquireable
is_offerable: isOfferable
sale_message: saleMessage
...ArtworkSidebarSizeInfo_piece
}
}
`,
}
)
|
}}
selected={this.state.selectedEditionSet === editionSet}
disabled={!editionEcommerceAvailable}
label={editionFragment}
|
tag_sets.rs
|
// Copyright 2014-2017 The html5ever Project Developers. See the
// COPYRIGHT file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Various sets of HTML tag names, and macros for declaring them.
use ExpandedName;
macro_rules! declare_tag_set_impl ( ($param:ident, $b:ident, $supr:ident, $($tag:tt)+) => (
match $param {
$( expanded_name!(html $tag) => $b, )+
p => $supr(p),
}
));
macro_rules! declare_tag_set_body (
($param:ident = [$supr:ident] - $($tag:tt)+)
=> ( declare_tag_set_impl!($param, false, $supr, $($tag)+) );
($param:ident = [$supr:ident] + $($tag:tt)+)
=> ( declare_tag_set_impl!($param, true, $supr, $($tag)+) );
($param:ident = $($tag:tt)+)
=> ( declare_tag_set_impl!($param, true, empty_set, $($tag)+) );
);
macro_rules! declare_tag_set (
(pub $name:ident = $($toks:tt)+) => (
pub fn $name(p: ::ExpandedName) -> bool {
declare_tag_set_body!(p = $($toks)+)
}
);
($name:ident = $($toks:tt)+) => (
fn $name(p: ::ExpandedName) -> bool {
declare_tag_set_body!(p = $($toks)+)
}
);
);
#[inline(always)] pub fn empty_set(_: ExpandedName) -> bool { false }
#[inline(always)] pub fn full_set(_: ExpandedName) -> bool { true }
declare_tag_set!(pub html_default_scope =
"applet" "caption" "html" "table" "td" "th" "marquee" "object" "template");
#[inline(always)] pub fn default_scope(name: ExpandedName) -> bool {
html_default_scope(name) ||
mathml_text_integration_point(name) ||
svg_html_integration_point(name)
}
declare_tag_set!(pub list_item_scope = [default_scope] + "ol" "ul");
declare_tag_set!(pub button_scope = [default_scope] + "button");
declare_tag_set!(pub table_scope = "html" "table" "template");
declare_tag_set!(pub select_scope = [full_set] - "optgroup" "option");
declare_tag_set!(pub table_body_context = "tbody" "tfoot" "thead" "template" "html");
declare_tag_set!(pub table_row_context = "tr" "template" "html");
declare_tag_set!(pub td_th = "td" "th");
declare_tag_set!(pub cursory_implied_end =
"dd" "dt" "li" "option" "optgroup" "p" "rb" "rp" "rt" "rtc");
declare_tag_set!(pub thorough_implied_end = [cursory_implied_end]
+ "caption" "colgroup" "tbody" "td" "tfoot" "th" "thead" "tr");
declare_tag_set!(pub heading_tag = "h1" "h2" "h3" "h4" "h5" "h6");
declare_tag_set!(pub special_tag =
"address" "applet" "area" "article" "aside" "base" "basefont" "bgsound" "blockquote" "body"
"br" "button" "caption" "center" "col" "colgroup" "dd" "details" "dir" "div" "dl" "dt" "embed"
"fieldset" "figcaption" "figure" "footer" "form" "frame" "frameset" "h1" "h2" "h3" "h4" "h5"
"h6" "head" "header" "hgroup" "hr" "html" "iframe" "img" "input" "isindex" "li" "link"
"listing" "main" "marquee" "menu" "meta" "nav" "noembed" "noframes" "noscript"
"object" "ol" "p" "param" "plaintext" "pre" "script" "section" "select" "source" "style"
"summary" "table" "tbody" "td" "template" "textarea" "tfoot" "th" "thead" "title" "tr" "track"
"ul" "wbr" "xmp");
//§ END
pub fn mathml_text_integration_point(p: ExpandedName) -> bool {
matches!(p,
expanded_name!(mathml "mi") |
expanded_name!(mathml "mo") |
expanded_name!(mathml "mn") |
expanded_name!(mathml "ms") |
expanded_name!(mathml "mtext"))
}
/// https://html.spec.whatwg.org/multipage/#html-integration-point
pub fn svg_html_integration_point(p: ExpandedName) -> bool {
|
// annotation-xml are handle in another place
matches!(p,
expanded_name!(svg "foreignObject") |
expanded_name!(svg "desc") |
expanded_name!(svg "title"))
}
|
|
sort_items.rs
|
use std::cmp::Ordering;
use itertools::Itertools;
use syntax::{
ast::{self, HasName},
ted, AstNode, TextRange,
};
use crate::{utils::get_methods, AssistContext, AssistId, AssistKind, Assists};
// Assist: sort_items
//
// Sorts item members alphabetically: fields, enum variants and methods.
//
// ```
// struct $0Foo$0 { second: u32, first: String }
// ```
// ->
// ```
// struct Foo { first: String, second: u32 }
// ```
// ---
// ```
// trait $0Bar$0 {
// fn second(&self) -> u32;
// fn first(&self) -> String;
// }
// ```
// ->
// ```
// trait Bar {
// fn first(&self) -> String;
// fn second(&self) -> u32;
// }
// ```
// ---
// ```
// struct Baz;
// impl $0Baz$0 {
// fn second(&self) -> u32;
// fn first(&self) -> String;
// }
// ```
// ->
// ```
// struct Baz;
// impl Baz {
// fn first(&self) -> String;
// fn second(&self) -> u32;
// }
// ```
// ---
// There is a difference between sorting enum variants:
//
// ```
// enum $0Animal$0 {
// Dog(String, f64),
// Cat { weight: f64, name: String },
// }
// ```
// ->
// ```
// enum Animal {
// Cat { weight: f64, name: String },
// Dog(String, f64),
// }
// ```
// and sorting a single enum struct variant:
//
// ```
// enum Animal {
// Dog(String, f64),
// Cat $0{ weight: f64, name: String }$0,
// }
// ```
// ->
// ```
// enum Animal {
// Dog(String, f64),
// Cat { name: String, weight: f64 },
// }
// ```
pub(crate) fn sort_items(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
if ctx.has_empty_selection() {
cov_mark::hit!(not_applicable_if_no_selection);
return None;
}
if let Some(trait_ast) = ctx.find_node_at_offset::<ast::Trait>() {
add_sort_methods_assist(acc, trait_ast.assoc_item_list()?)
} else if let Some(impl_ast) = ctx.find_node_at_offset::<ast::Impl>() {
add_sort_methods_assist(acc, impl_ast.assoc_item_list()?)
} else if let Some(struct_ast) = ctx.find_node_at_offset::<ast::Struct>() {
add_sort_field_list_assist(acc, struct_ast.field_list())
} else if let Some(union_ast) = ctx.find_node_at_offset::<ast::Union>() {
add_sort_fields_assist(acc, union_ast.record_field_list()?)
} else if let Some(variant_ast) = ctx.find_node_at_offset::<ast::Variant>() {
add_sort_field_list_assist(acc, variant_ast.field_list())
} else if let Some(enum_struct_variant_ast) = ctx.find_node_at_offset::<ast::RecordFieldList>()
{
// should be above enum and below struct
add_sort_fields_assist(acc, enum_struct_variant_ast)
} else if let Some(enum_ast) = ctx.find_node_at_offset::<ast::Enum>() {
add_sort_variants_assist(acc, enum_ast.variant_list()?)
|
} else {
None
}
}
trait AddRewrite {
fn add_rewrite<T: AstNode>(
&mut self,
label: &str,
old: Vec<T>,
new: Vec<T>,
target: TextRange,
) -> Option<()>;
}
impl AddRewrite for Assists {
fn add_rewrite<T: AstNode>(
&mut self,
label: &str,
old: Vec<T>,
new: Vec<T>,
target: TextRange,
) -> Option<()> {
self.add(AssistId("sort_items", AssistKind::RefactorRewrite), label, target, |builder| {
let mutable: Vec<T> = old.into_iter().map(|it| builder.make_mut(it)).collect();
mutable
.into_iter()
.zip(new)
.for_each(|(old, new)| ted::replace(old.syntax(), new.clone_for_update().syntax()));
})
}
}
fn add_sort_field_list_assist(acc: &mut Assists, field_list: Option<ast::FieldList>) -> Option<()> {
match field_list {
Some(ast::FieldList::RecordFieldList(it)) => add_sort_fields_assist(acc, it),
_ => {
cov_mark::hit!(not_applicable_if_sorted_or_empty_or_single);
None
}
}
}
fn add_sort_methods_assist(acc: &mut Assists, item_list: ast::AssocItemList) -> Option<()> {
let methods = get_methods(&item_list);
let sorted = sort_by_name(&methods);
if methods == sorted {
cov_mark::hit!(not_applicable_if_sorted_or_empty_or_single);
return None;
}
acc.add_rewrite("Sort methods alphabetically", methods, sorted, item_list.syntax().text_range())
}
fn add_sort_fields_assist(
acc: &mut Assists,
record_field_list: ast::RecordFieldList,
) -> Option<()> {
let fields: Vec<_> = record_field_list.fields().collect();
let sorted = sort_by_name(&fields);
if fields == sorted {
cov_mark::hit!(not_applicable_if_sorted_or_empty_or_single);
return None;
}
acc.add_rewrite(
"Sort fields alphabetically",
fields,
sorted,
record_field_list.syntax().text_range(),
)
}
fn add_sort_variants_assist(acc: &mut Assists, variant_list: ast::VariantList) -> Option<()> {
let variants: Vec<_> = variant_list.variants().collect();
let sorted = sort_by_name(&variants);
if variants == sorted {
cov_mark::hit!(not_applicable_if_sorted_or_empty_or_single);
return None;
}
acc.add_rewrite(
"Sort variants alphabetically",
variants,
sorted,
variant_list.syntax().text_range(),
)
}
fn sort_by_name<T: HasName + Clone>(initial: &[T]) -> Vec<T> {
initial
.iter()
.cloned()
.sorted_by(|a, b| match (a.name(), b.name()) {
(Some(a), Some(b)) => Ord::cmp(&a.to_string(), &b.to_string()),
// unexpected, but just in case
(None, None) => Ordering::Equal,
(None, Some(_)) => Ordering::Less,
(Some(_), None) => Ordering::Greater,
})
.collect()
}
#[cfg(test)]
mod tests {
use crate::tests::{check_assist, check_assist_not_applicable};
use super::*;
#[test]
fn not_applicable_if_no_selection() {
cov_mark::check!(not_applicable_if_no_selection);
check_assist_not_applicable(
sort_items,
r#"
t$0rait Bar {
fn b();
fn a();
}
"#,
)
}
#[test]
fn not_applicable_if_trait_empty() {
cov_mark::check!(not_applicable_if_sorted_or_empty_or_single);
check_assist_not_applicable(
sort_items,
r#"
t$0rait Bar$0 {
}
"#,
)
}
#[test]
fn not_applicable_if_impl_empty() {
cov_mark::check!(not_applicable_if_sorted_or_empty_or_single);
check_assist_not_applicable(
sort_items,
r#"
struct Bar;
$0impl Bar$0 {
}
"#,
)
}
#[test]
fn not_applicable_if_struct_empty() {
cov_mark::check!(not_applicable_if_sorted_or_empty_or_single);
check_assist_not_applicable(
sort_items,
r#"
$0struct Bar$0 ;
"#,
)
}
#[test]
fn not_applicable_if_struct_empty2() {
cov_mark::check!(not_applicable_if_sorted_or_empty_or_single);
check_assist_not_applicable(
sort_items,
r#"
$0struct Bar$0 { };
"#,
)
}
#[test]
fn not_applicable_if_enum_empty() {
cov_mark::check!(not_applicable_if_sorted_or_empty_or_single);
check_assist_not_applicable(
sort_items,
r#"
$0enum ZeroVariants$0 {};
"#,
)
}
#[test]
fn not_applicable_if_trait_sorted() {
cov_mark::check!(not_applicable_if_sorted_or_empty_or_single);
check_assist_not_applicable(
sort_items,
r#"
t$0rait Bar$0 {
fn a() {}
fn b() {}
fn c() {}
}
"#,
)
}
#[test]
fn not_applicable_if_impl_sorted() {
cov_mark::check!(not_applicable_if_sorted_or_empty_or_single);
check_assist_not_applicable(
sort_items,
r#"
struct Bar;
$0impl Bar$0 {
fn a() {}
fn b() {}
fn c() {}
}
"#,
)
}
#[test]
fn not_applicable_if_struct_sorted() {
cov_mark::check!(not_applicable_if_sorted_or_empty_or_single);
check_assist_not_applicable(
sort_items,
r#"
$0struct Bar$0 {
a: u32,
b: u8,
c: u64,
}
"#,
)
}
#[test]
fn not_applicable_if_union_sorted() {
cov_mark::check!(not_applicable_if_sorted_or_empty_or_single);
check_assist_not_applicable(
sort_items,
r#"
$0union Bar$0 {
a: u32,
b: u8,
c: u64,
}
"#,
)
}
#[test]
fn not_applicable_if_enum_sorted() {
cov_mark::check!(not_applicable_if_sorted_or_empty_or_single);
check_assist_not_applicable(
sort_items,
r#"
$0enum Bar$0 {
a,
b,
c,
}
"#,
)
}
#[test]
fn sort_trait() {
check_assist(
sort_items,
r#"
$0trait Bar$0 {
fn a() {
}
// comment for c
fn c() {}
fn z() {}
fn b() {}
}
"#,
r#"
trait Bar {
fn a() {
}
fn b() {}
// comment for c
fn c() {}
fn z() {}
}
"#,
)
}
#[test]
fn sort_impl() {
check_assist(
sort_items,
r#"
struct Bar;
$0impl Bar$0 {
fn c() {}
fn a() {}
/// long
/// doc
/// comment
fn z() {}
fn d() {}
}
"#,
r#"
struct Bar;
impl Bar {
fn a() {}
fn c() {}
fn d() {}
/// long
/// doc
/// comment
fn z() {}
}
"#,
)
}
#[test]
fn sort_struct() {
check_assist(
sort_items,
r#"
$0struct Bar$0 {
b: u8,
a: u32,
c: u64,
}
"#,
r#"
struct Bar {
a: u32,
b: u8,
c: u64,
}
"#,
)
}
#[test]
fn sort_generic_struct_with_lifetime() {
check_assist(
sort_items,
r#"
$0struct Bar<'a,$0 T> {
d: &'a str,
b: u8,
a: T,
c: u64,
}
"#,
r#"
struct Bar<'a, T> {
a: T,
b: u8,
c: u64,
d: &'a str,
}
"#,
)
}
#[test]
fn sort_struct_fields_diff_len() {
check_assist(
sort_items,
r#"
$0struct Bar $0{
aaa: u8,
a: usize,
b: u8,
}
"#,
r#"
struct Bar {
a: usize,
aaa: u8,
b: u8,
}
"#,
)
}
#[test]
fn sort_union() {
check_assist(
sort_items,
r#"
$0union Bar$0 {
b: u8,
a: u32,
c: u64,
}
"#,
r#"
union Bar {
a: u32,
b: u8,
c: u64,
}
"#,
)
}
#[test]
fn sort_enum() {
check_assist(
sort_items,
r#"
$0enum Bar $0{
d{ first: u32, second: usize},
b = 14,
a,
c(u32, usize),
}
"#,
r#"
enum Bar {
a,
b = 14,
c(u32, usize),
d{ first: u32, second: usize},
}
"#,
)
}
#[test]
fn sort_struct_enum_variant_fields() {
check_assist(
sort_items,
r#"
enum Bar {
d$0{ second: usize, first: u32 }$0,
b = 14,
a,
c(u32, usize),
}
"#,
r#"
enum Bar {
d{ first: u32, second: usize },
b = 14,
a,
c(u32, usize),
}
"#,
)
}
#[test]
fn sort_struct_enum_variant() {
check_assist(
sort_items,
r#"
enum Bar {
$0d$0{ second: usize, first: u32 },
}
"#,
r#"
enum Bar {
d{ first: u32, second: usize },
}
"#,
)
}
}
| |
player.rs
|
use serde::{Deserialize, Serialize};
#[derive(Deserialize, Serialize, Clone, Debug)]
pub struct Guild {
pub name: String,
pub realm: String,
}
#[derive(Deserialize, Serialize, Copy, Clone, Debug)]
pub enum Gender {
#[serde(rename = "male")]
Male,
#[serde(rename = "female")]
Female,
}
#[derive(Deserialize, Serialize, Copy, Clone, Debug)]
pub enum Faction {
#[serde(rename = "horde")]
Horde,
#[serde(rename = "alliance")]
Alliance,
}
#[derive(Deserialize, Serialize, Copy, Clone, Debug)]
pub enum Race {
Human,
Dwarf,
Gnome,
Draenei,
Worgen,
Pandaren,
Orc,
Undead,
Tauren,
Troll,
Goblin,
Mechagnome,
Nightborne,
Vulpera,
#[serde(rename = "Night Elf")]
NightElf,
#[serde(rename = "Blood Elf")]
BloodElf,
#[serde(rename = "Void Elf")]
VoidElf,
#[serde(rename = "Lightforged Draenei")]
LightforgedDraenei,
#[serde(rename = "Dark Iron Dwarf")]
DarkIronDwarf,
#[serde(rename = "Kul Tiran")]
KulTiran,
#[serde(rename = "Highmountain Tauren")]
HighmountainTauren,
#[serde(rename = "Mag'har Orc")]
MagharOrc,
#[serde(rename = "Zandalari Troll")]
ZandalariTroll,
}
#[derive(Deserialize, Serialize, Copy, Clone, Debug)]
pub enum Class {
Rogue,
Warrior,
Paladin,
Hunter,
Priest,
Shaman,
Mage,
Warlock,
Monk,
Druid,
#[serde(rename = "Demon Hunter")]
DemonHunter,
#[serde(rename = "Death Knight")]
DeathKnight,
}
#[derive(Deserialize, Serialize, Copy, Clone, Debug)]
pub enum Spec {
Arms,
Fury,
Protection,
Holy,
Retribution,
#[serde(rename = "Beast Mastery")]
BeastMastery,
Marksmanship,
Survival,
Outlaw,
Assassination,
Subtlety,
Discipline,
Shadow,
Elemental,
Restoration,
Enhancement,
|
Affliction,
Demonology,
Destruction,
Brewmaster,
Mistweaver,
Windwalker,
Balance,
Feral,
Guardian,
Havoc,
Vengeance,
Blood,
Unholy,
}
#[derive(Deserialize, Serialize, Copy, Clone, Debug)]
pub enum Role {
DPS,
#[serde(rename = "TANK")]
Tank,
#[serde(rename = "HEALING")]
Healing,
}
|
Arcane,
Fire,
Frost,
|
display_power.py
|
def
|
(power: dict) -> str:
name = power['name']
invocation_unit = power['invocation']['card_name']
invocation_instances = power['invocation']['num_instances']
cost = power['cost']
return f'{name} (Invocation: {invocation_instances} {invocation_unit}) - Cost: {cost}'
|
display_power
|
csinfo.entity.ts
|
import { Entity, Column, PrimaryGeneratedColumn, OneToMany } from "typeorm";
import { CSResult } from "./csresult.entity";
@Entity({name: "info"})
export class CSInfo {
|
@PrimaryGeneratedColumn()
id: number;
@Column({type: "varchar", length: 20})
cs_name: string;
@Column({type: "varchar", length: 20})
host: string;
@Column({type: "varchar", length: 30, nullable: true})
link: string;
@Column({type: "varchar", length: 20, nullable: true})
sns: string;
@OneToMany(() => CSResult, result => result.info)
results: CSResult[];
static create(
cs_name: string,
host: string,
link: string,
sns: string,
) {
const info = new CSInfo();
info.cs_name = cs_name;
info.host = host;
info.link = link;
info.sns = sns;
info.results = [];
return info;
}
}
| |
core.rs
|
// Copyright 2018 The xi-editor Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! The fundamental druid types.
use std::collections::VecDeque;
use log;
use crate::bloom::Bloom;
use crate::kurbo::{Affine, Insets, Point, Rect, Shape, Size};
use crate::piet::RenderContext;
use crate::{
BoxConstraints, Command, Data, Env, Event, EventCtx, InternalEvent, InternalLifeCycle,
LayoutCtx, LifeCycle, LifeCycleCtx, PaintCtx, Target, UpdateCtx, Widget, WidgetId, WindowId,
};
/// Our queue type
pub(crate) type CommandQueue = VecDeque<(Target, Command)>;
/// A container for one widget in the hierarchy.
///
/// Generally, container widgets don't contain other widgets directly,
/// but rather contain a `WidgetPod`, which has additional state needed
/// for layout and for the widget to participate in event flow.
///
/// This struct also contains the previous data for a widget, which is
/// essential for the [`update`] method, both to decide when the update
/// needs to propagate, and to provide the previous data so that a
/// widget can process a diff between the old value and the new.
///
/// [`update`]: trait.Widget.html#tymethod.update
pub struct WidgetPod<T, W> {
state: BaseState,
old_data: Option<T>,
env: Option<Env>,
inner: W,
}
/// Generic state for all widgets in the hierarchy.
///
/// This struct contains the widget's layout rect, flags
/// indicating when the widget is active or focused, and other
/// state necessary for the widget to participate in event
/// flow.
///
/// It is provided to [`paint`] calls as a non-mutable reference,
/// largely so a widget can know its size, also because active
/// and focus state can affect the widget's appearance. Other than
/// that, widgets will generally not interact with it directly,
/// but it is an important part of the [`WidgetPod`] struct.
///
/// [`paint`]: trait.Widget.html#tymethod.paint
/// [`WidgetPod`]: struct.WidgetPod.html
#[derive(Clone)]
pub(crate) struct BaseState {
pub(crate) id: WidgetId,
/// The frame of this widget in its parents coordinate space.
/// This should always be set; it is only an `Option` so that we
/// can more easily track (and help debug) if it hasn't been set.
layout_rect: Option<Rect>,
/// The insets applied to the layout rect to generate the paint rect.
/// In general, these will be zero; the exception is for things like
/// drop shadows or overflowing text.
pub(crate) paint_insets: Insets,
// TODO: consider using bitflags for the booleans.
// This should become an invalidation rect.
pub(crate) needs_inval: bool,
pub(crate) is_hot: bool,
pub(crate) is_active: bool,
pub(crate) needs_layout: bool,
/// Any descendant is active.
has_active: bool,
/// In the focused path, starting from window and ending at the focused widget.
/// Descendants of the focused widget are not in the focused path.
pub(crate) has_focus: bool,
/// Any descendant has requested an animation frame.
pub(crate) request_anim: bool,
/// Any descendant has requested a timer.
///
/// Note: we don't have any way of clearing this request, as it's
/// likely not worth the complexity.
pub(crate) request_timer: bool,
pub(crate) focus_chain: Vec<WidgetId>,
pub(crate) request_focus: Option<FocusChange>,
pub(crate) children: Bloom<WidgetId>,
pub(crate) children_changed: bool,
}
/// Methods by which a widget can attempt to change focus state.
#[derive(Debug, Clone, Copy)]
pub(crate) enum FocusChange {
/// The focused widget is giving up focus.
Resign,
/// A specific widget wants focus
Focus(WidgetId),
/// Focus should pass to the next focusable widget
Next,
/// Focus should pass to the previous focusable widget
Previous,
}
impl<T, W: Widget<T>> WidgetPod<T, W> {
/// Create a new widget pod.
///
/// In a widget hierarchy, each widget is wrapped in a `WidgetPod`
/// so it can participate in layout and event flow. The process of
/// adding a child widget to a container should call this method.
pub fn new(inner: W) -> WidgetPod<T, W> {
let mut state = BaseState::new(inner.id().unwrap_or_else(WidgetId::next));
state.children_changed = true;
state.needs_layout = true;
WidgetPod {
state,
old_data: None,
env: None,
inner,
}
}
/// Read-only access to state. We don't mark the field as `pub` because
/// we want to control mutation.
pub(crate) fn state(&self) -> &BaseState {
&self.state
}
/// Query the "active" state of the widget.
pub fn is_active(&self) -> bool {
self.state.is_active
}
/// Returns `true` if any descendant is active.
pub fn has_active(&self) -> bool {
self.state.has_active
}
/// Query the "hot" state of the widget.
///
/// See [`EventCtx::is_hot`](struct.EventCtx.html#method.is_hot) for
/// additional information.
pub fn is_hot(&self) -> bool {
self.state.is_hot
}
/// Return a reference to the inner widget.
pub fn widget(&self) -> &W {
&self.inner
}
/// Return a mutable reference to the inner widget.
pub fn widget_mut(&mut self) -> &mut W {
&mut self.inner
}
/// Get the identity of the widget.
pub fn
|
(&self) -> WidgetId {
self.state.id
}
/// Set layout rectangle.
///
/// Intended to be called on child widget in container's `layout`
/// implementation.
pub fn set_layout_rect(&mut self, ctx: &mut LayoutCtx, data: &T, env: &Env, layout_rect: Rect) {
self.state.layout_rect = Some(layout_rect);
if WidgetPod::set_hot_state(
&mut self.inner,
ctx.command_queue,
&mut self.state,
ctx.window_id,
layout_rect,
ctx.mouse_pos,
data,
env,
) {
ctx.base_state.merge_up(&self.state);
}
}
#[deprecated(since = "0.5.0", note = "use layout_rect() instead")]
#[doc(hidden)]
pub fn get_layout_rect(&self) -> Rect {
self.layout_rect()
}
/// The layout rectangle.
///
/// This will be same value as set by `set_layout_rect`.
pub fn layout_rect(&self) -> Rect {
self.state.layout_rect.unwrap_or_default()
}
/// Get the widget's paint [`Rect`].
///
/// This is the [`Rect`] that widget has indicated it needs to paint in.
/// This is the same as the [`layout_rect`] with the [`paint_insets`] applied;
/// in the general case it is the same as the [`layout_rect`].
///
/// [`layout_rect`]: #method.layout_rect
/// [`Rect`]: struct.Rect.html
/// [`paint_insets`]: #method.paint_insets
pub fn paint_rect(&self) -> Rect {
self.state.paint_rect()
}
/// Return the paint [`Insets`] for this widget.
///
/// If these [`Insets`] are nonzero, they describe the area beyond a widget's
/// layout rect where it needs to paint.
///
/// These are generally zero; exceptions are widgets that do things like
/// paint a drop shadow.
///
/// A widget can set its insets by calling [`set_paint_insets`] during its
/// [`layout`] method.
///
/// [`Insets`]: struct.Insets.html
/// [`set_paint_insets`]: struct.LayoutCtx.html#method.set_paint_insets
/// [`layout`]: trait.Widget.html#tymethod.layout
pub fn paint_insets(&self) -> Insets {
self.state.paint_insets
}
/// Given a parents layout size, determine the appropriate paint `Insets`
/// for the parent.
///
/// This is a convenience method to be used from the [`layout`] method
/// of a `Widget` that manages a child; it allows the parent to correctly
/// propogate a child's desired paint rect, if it extends beyond the bounds
/// of the parent's layout rect.
///
/// [`layout`]: trait.Widget.html#tymethod.layout
/// [`Insets`]: struct.Insets.html
pub fn compute_parent_paint_insets(&self, parent_size: Size) -> Insets {
let parent_bounds = Rect::ZERO.with_size(parent_size);
let union_pant_rect = self.paint_rect().union(parent_bounds);
union_pant_rect - parent_bounds
}
/// Determines if the provided `mouse_pos` is inside `rect`
/// and if so updates the hot state and sends `LifeCycle::HotChanged`.
///
/// Returns `true` if the hot state changed.
///
/// The provided `child_state` should be merged up if this returns `true`.
#[allow(clippy::too_many_arguments)]
fn set_hot_state(
child: &mut W,
command_queue: &mut CommandQueue,
child_state: &mut BaseState,
window_id: WindowId,
rect: Rect,
mouse_pos: Option<Point>,
data: &T,
env: &Env,
) -> bool {
let had_hot = child_state.is_hot;
child_state.is_hot = match mouse_pos {
Some(pos) => rect.winding(pos) != 0,
None => false,
};
if had_hot != child_state.is_hot {
let hot_changed_event = LifeCycle::HotChanged(child_state.is_hot);
let mut child_ctx = LifeCycleCtx {
command_queue,
base_state: child_state,
window_id,
};
child.lifecycle(&mut child_ctx, &hot_changed_event, data, env);
return true;
}
false
}
}
impl<T: Data, W: Widget<T>> WidgetPod<T, W> {
/// Paint a child widget.
///
/// Generally called by container widgets as part of their [`paint`]
/// method.
///
/// Note that this method does not apply the offset of the layout rect.
/// If that is desired, use [`paint_with_offset`] instead.
///
/// [`layout`]: trait.Widget.html#tymethod.layout
/// [`paint`]: trait.Widget.html#tymethod.paint
/// [`paint_with_offset`]: #method.paint_with_offset
pub fn paint(&mut self, ctx: &mut PaintCtx, data: &T, env: &Env) {
let mut inner_ctx = PaintCtx {
render_ctx: ctx.render_ctx,
window_id: ctx.window_id,
z_ops: Vec::new(),
region: ctx.region.clone(),
base_state: &self.state,
focus_widget: ctx.focus_widget,
};
self.inner.paint(&mut inner_ctx, data, env);
ctx.z_ops.append(&mut inner_ctx.z_ops);
if env.get(Env::DEBUG_PAINT) {
const BORDER_WIDTH: f64 = 1.0;
let rect = inner_ctx.size().to_rect().inset(BORDER_WIDTH / -2.0);
let id = self.id().to_raw();
let color = env.get_debug_color(id);
inner_ctx.stroke(rect, &color, BORDER_WIDTH);
}
self.state.needs_inval = false;
}
/// Paint the widget, translating it by the origin of its layout rectangle.
///
/// This will recursively paint widgets, stopping if a widget's layout
/// rect is outside of the currently visible region.
// Discussion: should this be `paint` and the other `paint_raw`?
pub fn paint_with_offset(&mut self, ctx: &mut PaintCtx, data: &T, env: &Env) {
self.paint_with_offset_impl(ctx, data, env, false)
}
/// Paint the widget, even if its layout rect is outside of the currently
/// visible region.
pub fn paint_with_offset_always(&mut self, ctx: &mut PaintCtx, data: &T, env: &Env) {
self.paint_with_offset_impl(ctx, data, env, true)
}
/// Shared implementation that can skip drawing non-visible content.
fn paint_with_offset_impl(
&mut self,
ctx: &mut PaintCtx,
data: &T,
env: &Env,
paint_if_not_visible: bool,
) {
if !paint_if_not_visible && !ctx.region().intersects(self.state.paint_rect()) {
return;
}
ctx.with_save(|ctx| {
let layout_origin = self.layout_rect().origin().to_vec2();
ctx.transform(Affine::translate(layout_origin));
let visible = ctx.region().to_rect() - layout_origin;
ctx.with_child_ctx(visible, |ctx| self.paint(ctx, data, env));
});
}
/// Compute layout of a widget.
///
/// Generally called by container widgets as part of their [`layout`]
/// method.
///
/// [`layout`]: trait.Widget.html#tymethod.layout
pub fn layout(
&mut self,
ctx: &mut LayoutCtx,
bc: &BoxConstraints,
data: &T,
env: &Env,
) -> Size {
self.state.needs_layout = false;
let child_mouse_pos = match ctx.mouse_pos {
Some(pos) => Some(pos - self.layout_rect().origin().to_vec2()),
None => None,
};
let mut child_ctx = LayoutCtx {
command_queue: ctx.command_queue,
base_state: &mut self.state,
window_id: ctx.window_id,
text_factory: ctx.text_factory,
mouse_pos: child_mouse_pos,
};
let size = self.inner.layout(&mut child_ctx, bc, data, env);
ctx.base_state.merge_up(&child_ctx.base_state);
if size.width.is_infinite() {
let name = self.widget().type_name();
log::warn!("Widget `{}` has an infinite width.", name);
}
if size.height.is_infinite() {
let name = self.widget().type_name();
log::warn!("Widget `{}` has an infinite height.", name);
}
size
}
/// Propagate an event.
///
/// Generally the [`event`] method of a container widget will call this
/// method on all its children. Here is where a great deal of the event
/// flow logic resides, particularly whether to continue propagating
/// the event.
///
/// [`event`]: trait.Widget.html#tymethod.event
pub fn event(&mut self, ctx: &mut EventCtx, event: &Event, data: &mut T, env: &Env) {
if self.old_data.is_none() {
log::error!(
"widget {:?} is receiving an event without having first \
received WidgetAdded.",
ctx.widget_id()
);
}
// log if we seem not to be laid out when we should be
if !matches!(event, Event::WindowConnected | Event::WindowSize(_))
&& self.state.layout_rect.is_none()
{
log::warn!(
"Widget '{}' received an event ({:?}) without having been laid out. \
This likely indicates a missed call to set_layout_rect.",
self.inner.type_name(),
event,
);
}
// TODO: factor as much logic as possible into monomorphic functions.
if ctx.is_handled {
// This function is called by containers to propagate an event from
// containers to children. Non-recurse events will be invoked directly
// from other points in the library.
return;
}
let had_active = self.state.has_active;
let mut child_ctx = EventCtx {
cursor: ctx.cursor,
command_queue: ctx.command_queue,
window: ctx.window,
window_id: ctx.window_id,
base_state: &mut self.state,
is_handled: false,
is_root: false,
focus_widget: ctx.focus_widget,
};
let rect = child_ctx.base_state.layout_rect.unwrap_or_default();
// Note: could also represent this as `Option<Event>`.
let mut recurse = true;
let child_event = match event {
Event::Internal(internal) => match internal {
InternalEvent::MouseLeave => {
let hot_changed = WidgetPod::set_hot_state(
&mut self.inner,
child_ctx.command_queue,
child_ctx.base_state,
child_ctx.window_id,
rect,
None,
data,
env,
);
recurse = had_active || hot_changed;
Event::Internal(InternalEvent::MouseLeave)
}
InternalEvent::TargetedCommand(target, cmd) => {
match target {
Target::Window(_) => Event::Command(cmd.clone()),
Target::Widget(id) if *id == child_ctx.widget_id() => {
Event::Command(cmd.clone())
}
Target::Widget(id) => {
// Recurse when the target widget could be our descendant.
// The bloom filter we're checking can return false positives.
recurse = child_ctx.base_state.children.may_contain(id);
Event::Internal(InternalEvent::TargetedCommand(*target, cmd.clone()))
}
Target::Global => {
panic!("Target::Global should be converted before WidgetPod")
}
}
}
},
Event::WindowConnected => Event::WindowConnected,
Event::WindowSize(size) => {
child_ctx.request_layout();
recurse = ctx.is_root;
Event::WindowSize(*size)
}
Event::MouseDown(mouse_event) => {
WidgetPod::set_hot_state(
&mut self.inner,
child_ctx.command_queue,
child_ctx.base_state,
child_ctx.window_id,
rect,
Some(mouse_event.pos),
data,
env,
);
recurse = had_active || child_ctx.base_state.is_hot;
let mut mouse_event = mouse_event.clone();
mouse_event.pos -= rect.origin().to_vec2();
Event::MouseDown(mouse_event)
}
Event::MouseUp(mouse_event) => {
WidgetPod::set_hot_state(
&mut self.inner,
child_ctx.command_queue,
child_ctx.base_state,
child_ctx.window_id,
rect,
Some(mouse_event.pos),
data,
env,
);
recurse = had_active || child_ctx.base_state.is_hot;
let mut mouse_event = mouse_event.clone();
mouse_event.pos -= rect.origin().to_vec2();
Event::MouseUp(mouse_event)
}
Event::MouseMove(mouse_event) => {
let hot_changed = WidgetPod::set_hot_state(
&mut self.inner,
child_ctx.command_queue,
child_ctx.base_state,
child_ctx.window_id,
rect,
Some(mouse_event.pos),
data,
env,
);
recurse = had_active || child_ctx.base_state.is_hot || hot_changed;
let mut mouse_event = mouse_event.clone();
mouse_event.pos -= rect.origin().to_vec2();
Event::MouseMove(mouse_event)
}
Event::KeyDown(e) => {
recurse = child_ctx.has_focus();
Event::KeyDown(*e)
}
Event::KeyUp(e) => {
recurse = child_ctx.has_focus();
Event::KeyUp(*e)
}
Event::Paste(e) => {
recurse = child_ctx.has_focus();
Event::Paste(e.clone())
}
Event::Wheel(wheel_event) => {
recurse = had_active || child_ctx.base_state.is_hot;
Event::Wheel(wheel_event.clone())
}
Event::Zoom(zoom) => {
recurse = had_active || child_ctx.base_state.is_hot;
Event::Zoom(*zoom)
}
Event::Timer(id) => {
recurse = child_ctx.base_state.request_timer;
Event::Timer(*id)
}
Event::Command(cmd) => Event::Command(cmd.clone()),
};
if recurse {
child_ctx.base_state.has_active = false;
self.inner.event(&mut child_ctx, &child_event, data, env);
child_ctx.base_state.has_active |= child_ctx.base_state.is_active;
};
ctx.base_state.merge_up(&child_ctx.base_state);
ctx.is_handled |= child_ctx.is_handled;
}
pub fn lifecycle(&mut self, ctx: &mut LifeCycleCtx, event: &LifeCycle, data: &T, env: &Env) {
let recurse = match event {
LifeCycle::Internal(internal) => match internal {
InternalLifeCycle::RouteWidgetAdded => {
// if this is called either we were just created, in
// which case we need to change lifecycle event to
// WidgetAdded or in case we were already created
// we just pass this event down
if self.old_data.is_none() {
self.lifecycle(ctx, &LifeCycle::WidgetAdded, data, env);
return;
} else {
if self.state.children_changed {
self.state.children.clear();
self.state.focus_chain.clear();
}
self.state.children_changed
}
}
InternalLifeCycle::RouteFocusChanged { old, new } => {
self.state.request_focus = None;
let this_changed = if *old == Some(self.state.id) {
Some(false)
} else if *new == Some(self.state.id) {
Some(true)
} else {
None
};
if let Some(change) = this_changed {
// Only send FocusChanged in case there's actual change
if old != new {
self.state.has_focus = change;
let event = LifeCycle::FocusChanged(change);
self.inner.lifecycle(ctx, &event, data, env);
}
false
} else {
self.state.has_focus = false;
// Recurse when the target widgets could be our descendants.
// The bloom filter we're checking can return false positives.
match (old, new) {
(Some(old), _) if self.state.children.may_contain(old) => true,
(_, Some(new)) if self.state.children.may_contain(new) => true,
_ => false,
}
}
}
#[cfg(test)]
InternalLifeCycle::DebugRequestState { widget, state_cell } => {
if *widget == self.id() {
state_cell.set(self.state.clone());
false
} else {
// Recurse when the target widget could be our descendant.
// The bloom filter we're checking can return false positives.
self.state.children.may_contain(&widget)
}
}
#[cfg(test)]
InternalLifeCycle::DebugInspectState(f) => {
f.call(&self.state);
true
}
},
LifeCycle::AnimFrame(_) => {
let r = self.state.request_anim;
self.state.request_anim = false;
r
}
LifeCycle::WidgetAdded => {
assert!(self.old_data.is_none());
self.old_data = Some(data.clone());
self.env = Some(env.clone());
true
}
LifeCycle::HotChanged(_) => false,
LifeCycle::FocusChanged(_) => {
// We are a descendant of a widget that has/had focus.
// Descendants don't inherit focus, so don't recurse.
false
}
};
if recurse {
let mut child_ctx = LifeCycleCtx {
command_queue: ctx.command_queue,
base_state: &mut self.state,
window_id: ctx.window_id,
};
self.inner.lifecycle(&mut child_ctx, event, data, env);
}
ctx.base_state.merge_up(&self.state);
// we need to (re)register children in case of one of the following events
match event {
LifeCycle::WidgetAdded | LifeCycle::Internal(InternalLifeCycle::RouteWidgetAdded) => {
self.state.children_changed = false;
ctx.base_state.children = ctx.base_state.children.union(self.state.children);
ctx.base_state.focus_chain.extend(&self.state.focus_chain);
ctx.register_child(self.id());
}
_ => (),
}
}
/// Propagate a data update.
///
/// Generally called by container widgets as part of their [`update`]
/// method.
///
/// [`update`]: trait.Widget.html#tymethod.update
pub fn update(&mut self, ctx: &mut UpdateCtx, data: &T, env: &Env) {
match (self.old_data.as_ref(), self.env.as_ref()) {
(Some(d), Some(e)) if d.same(data) && e.same(env) => return,
(None, _) => {
log::warn!("old_data missing in {:?}, skipping update", self.id());
self.old_data = Some(data.clone());
self.env = Some(env.clone());
return;
}
_ => (),
}
let mut child_ctx = UpdateCtx {
window: ctx.window,
base_state: &mut self.state,
window_id: ctx.window_id,
command_queue: ctx.command_queue,
};
self.inner
.update(&mut child_ctx, self.old_data.as_ref().unwrap(), data, env);
self.old_data = Some(data.clone());
self.env = Some(env.clone());
ctx.base_state.merge_up(&self.state)
}
}
impl<T, W: Widget<T> + 'static> WidgetPod<T, W> {
/// Box the contained widget.
///
/// Convert a `WidgetPod` containing a widget of a specific concrete type
/// into a dynamically boxed widget.
pub fn boxed(self) -> WidgetPod<T, Box<dyn Widget<T>>> {
WidgetPod::new(Box::new(self.inner))
}
}
impl BaseState {
pub(crate) fn new(id: WidgetId) -> BaseState {
BaseState {
id,
layout_rect: None,
paint_insets: Insets::ZERO,
needs_inval: false,
is_hot: false,
needs_layout: false,
is_active: false,
has_active: false,
has_focus: false,
request_anim: false,
request_timer: false,
request_focus: None,
focus_chain: Vec::new(),
children: Bloom::new(),
children_changed: false,
}
}
/// Update to incorporate state changes from a child.
fn merge_up(&mut self, child_state: &BaseState) {
self.needs_inval |= child_state.needs_inval;
self.needs_layout |= child_state.needs_layout;
self.request_anim |= child_state.request_anim;
self.request_timer |= child_state.request_timer;
self.has_active |= child_state.has_active;
self.has_focus |= child_state.has_focus;
self.children_changed |= child_state.children_changed;
self.request_focus = self.request_focus.or(child_state.request_focus);
}
#[inline]
pub(crate) fn size(&self) -> Size {
self.layout_rect.unwrap_or_default().size()
}
/// The paint region for this widget.
///
/// For more information, see [`WidgetPod::paint_rect`].
///
/// [`WidgetPod::paint_rect`]: struct.WidgetPod.html#method.paint_rect
pub(crate) fn paint_rect(&self) -> Rect {
self.layout_rect.unwrap_or_default() + self.paint_insets
}
#[cfg(test)]
#[allow(dead_code)]
pub(crate) fn layout_rect(&self) -> Rect {
self.layout_rect.unwrap_or_default()
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::widget::{Flex, Scroll, Split, TextBox};
use crate::{WidgetExt, WindowId};
const ID_1: WidgetId = WidgetId::reserved(0);
const ID_2: WidgetId = WidgetId::reserved(1);
const ID_3: WidgetId = WidgetId::reserved(2);
#[test]
fn register_children() {
fn make_widgets() -> impl Widget<Option<u32>> {
Split::columns(
Flex::<Option<u32>>::row()
.with_child(TextBox::new().with_id(ID_1).parse())
.with_child(TextBox::new().with_id(ID_2).parse())
.with_child(TextBox::new().with_id(ID_3).parse()),
Scroll::new(TextBox::new().parse()),
)
}
let widget = make_widgets();
let mut widget = WidgetPod::new(widget).boxed();
let mut command_queue: CommandQueue = VecDeque::new();
let mut state = BaseState::new(WidgetId::next());
let mut ctx = LifeCycleCtx {
command_queue: &mut command_queue,
base_state: &mut state,
window_id: WindowId::next(),
};
let env = Env::default();
widget.lifecycle(&mut ctx, &LifeCycle::WidgetAdded, &None, &env);
assert!(ctx.base_state.children.may_contain(&ID_1));
assert!(ctx.base_state.children.may_contain(&ID_2));
assert!(ctx.base_state.children.may_contain(&ID_3));
assert_eq!(ctx.base_state.children.entry_count(), 7);
}
}
|
id
|
bootstrap.go
|
/*
Copyright 2022 The KCP Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package wildwest
import (
"context"
"embed"
"testing"
"github.com/stretchr/testify/require"
apiextensionsv1client "k8s.io/apiextensions-apiserver/pkg/client/clientset/clientset/typed/apiextensions/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
configcrds "github.com/kcp-dev/kcp/config/crds"
)
//go:embed *.yaml
var rawCustomResourceDefinitions embed.FS
func Create(t *testing.T, client apiextensionsv1client.CustomResourceDefinitionInterface, grs ...metav1.GroupResource)
|
{
ctx := context.Background()
if deadline, ok := t.Deadline(); ok {
withDeadline, cancel := context.WithDeadline(ctx, deadline)
t.Cleanup(cancel)
ctx = withDeadline
}
err := configcrds.CreateFromFS(ctx, client, rawCustomResourceDefinitions, grs...)
require.NoError(t, err)
}
|
|
0031_auto_20190624_1503.py
|
# Generated by Django 2.2.2 on 2019-06-24 15:03
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0030_auto_20190624_1458'),
]
operations = [
migrations.AlterField(
|
model_name='contact',
name='tipom',
field=models.CharField(choices=[('Felicitación', 'Felicitación'), ('Queja', 'Queja'), ('Petición', 'Petición'), ('Solicitud', 'Solicitud'), ('Reclamo', 'Reclamo')], default='Petición', max_length=50, verbose_name='Categoría'),
),
]
| |
randombytes.rs
|
//! Cryptographic random number generation.
use ffi;
use std::iter::repeat;
/// `randombytes()` randomly generates size bytes of data.
///
/// THREAD SAFETY: `randombytes()` is thread-safe provided that you have
/// called `sodiumoxide::init()` once before using any other function
/// from sodiumoxide.
pub fn randombytes(size: usize) -> Vec<u8>
|
/// `randombytes_into()` fills a buffer `buf` with random data.
///
/// THREAD SAFETY: `randombytes_into()` is thread-safe provided that you have
/// called `sodiumoxide::init()` once before using any other function
/// from sodiumoxide.
pub fn randombytes_into(buf: &mut [u8]) {
unsafe {
ffi::randombytes_buf(buf.as_mut_ptr(), buf.len());
}
}
|
{
unsafe {
let mut buf: Vec<u8> = repeat(0u8).take(size).collect();
let pbuf = buf.as_mut_ptr();
ffi::randombytes_buf(pbuf, size);
buf
}
}
|
65.3fec5a2a.js
|
(window.webpackJsonp=window.webpackJsonp||[]).push([[65],{562:function(t,s,a){"use strict";a.r(s);var n=a(43),e=Object(n.a)({},function(){var t=this,s=t.$createElement,a=t._self._c||s;return a("ContentSlotsDistributor",{attrs:{"slot-key":t.$parent.slotKey}},[a("h1",{attrs:{id:"web-view"}},[a("a",{staticClass:"header-anchor",attrs:{href:"#web-view","aria-hidden":"true"}},[t._v("#")]),t._v(" <web-view>")]),t._v(" "),a("blockquote",[a("p",[a("code",[t._v("<web-view>")]),t._v(" 是一个网页容器。")])]),t._v(" "),a("h2",{attrs:{id:"子组件"}},[a("a",{staticClass:"header-anchor",attrs:{href:"#子组件","aria-hidden":"true"}},[t._v("#")]),t._v(" 子组件")]),t._v(" "),a("p",[t._v("无")]),t._v(" "),a("h2",{attrs:{id:"示例代码"}},[a("a",{staticClass:"header-anchor",attrs:{href:"#示例代码","aria-hidden":"true"}},[t._v("#")]),t._v(" 示例代码")]),t._v(" "),a("div",{staticClass:"language-vue extra-class"},[a("pre",{pre:!0,attrs:{class:"language-vue"}},[a("code",[a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("<")]),t._v("template")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(">")])]),t._v("\n "),a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("<")]),t._v("div")]),t._v(" "),a("span",{pre:!0,attrs:{class:"token attr-name"}},[t._v("class")]),a("span",{pre:!0,attrs:{class:"token attr-value"}},[a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("=")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')]),t._v("app"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')])]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(">")])]),t._v("\n\n "),a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("<")]),t._v("web-view")]),t._v(" "),a("span",{pre:!0,attrs:{class:"token attr-name"}},[t._v("ref")]),a("span",{pre:!0,attrs:{class:"token attr-value"}},[a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("=")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')]),t._v("reflectName"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')])]),t._v(" "),a("span",{pre:!0,attrs:{class:"token attr-name"}},[t._v("class")]),a("span",{pre:!0,attrs:{class:"token attr-value"}},[a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("=")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')]),t._v("webview"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')])]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(">")])]),t._v(" "),a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("</")]),t._v("web-view")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(">")])]),t._v("\n\n "),a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("</")]),t._v("div")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(">")])]),t._v("\n"),a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("</")]),t._v("template")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(">")])]),t._v("\n\n"),a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("<")]),t._v("style")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(">")])]),a("span",{pre:!0,attrs:{class:"token style"}},[a("span",{pre:!0,attrs:{class:"token language-css"}},[t._v("\n "),a("span",{pre:!0,attrs:{class:"token selector"}},[t._v(".app")]),t._v(" "),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("{")]),t._v("\n "),a("span",{pre:!0,attrs:{class:"token property"}},[t._v("width")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(":")]),t._v(" 750px"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(";")]),t._v("\n "),a("span",{pre:!0,attrs:{class:"token property"}},[t._v("flex")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(":")]),t._v(" 1"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(";")]),t._v("\n "),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("}")]),t._v("\n\n "),a("span",{pre:!0,attrs:{class:"token selector"}},[t._v(".webview")]),t._v(" "),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("{")]),t._v("\n "),a("span",{pre:!0,attrs:{class:"token property"}},[t._v("flex")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(":")]),t._v(" 1"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(";")]),t._v("\n "),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("}")]),t._v("\n")])]),a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("</")]),t._v("style")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(">")])]),t._v("\n\n"),a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("<")]),t._v("script")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(">")])]),a("span",{pre:!0,attrs:{class:"token script"}},[a("span",{pre:!0,attrs:{class:"token language-javascript"}},[t._v("\n "),a("span",{pre:!0,attrs:{class:"token keyword"}},[t._v("export")]),t._v(" "),a("span",{pre:!0,attrs:{class:"token keyword"}},[t._v("default")]),t._v(" "),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("{")]),t._v("\n "),a("span",{pre:!0,attrs:{class:"token function"}},[t._v("mounted")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("(")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(")")]),t._v(" "),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("{")]),t._v("\n "),a("span",{pre:!0,attrs:{class:"token keyword"}},[t._v("this")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(".")]),t._v("$refs"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(".")]),t._v("reflectName"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(".")]),a("span",{pre:!0,attrs:{class:"token function"}},[t._v("setUrl")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("(")]),a("span",{pre:!0,attrs:{class:"token string"}},[t._v("'https://eeui.app'")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(")")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(";")]),t._v("\n "),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("}")]),t._v("\n "),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("}")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(";")]),t._v("\n")])]),a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("</")]),t._v("script")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(">")])]),t._v("\n")])])]),a("h2",{attrs:{id:"配置参数"}},[a("a",{staticClass:"header-anchor",attrs:{href:"#配置参数","aria-hidden":"true"}},[t._v("#")]),t._v(" 配置参数")]),t._v(" "),a("table",[a("thead",[a("tr",[a("th",[t._v("属性名")]),t._v(" "),a("th",[t._v("类型")]),t._v(" "),a("th",[t._v("描述")]),t._v(" "),a("th",[t._v("默认值")])])]),t._v(" "),a("tbody",[a("tr",[a("td",[t._v("url")]),t._v(" "),a("td",[a("code",[t._v("String")])]),t._v(" "),a("td",[t._v("网页地址")]),t._v(" "),a("td",[t._v("-")])]),t._v(" "),a("tr",[a("td",[t._v("content")]),t._v(" "),a("td",[a("code",[t._v("String")])]),t._v(" "),a("td",[t._v("网页内容")]),t._v(" "),a("td",[t._v("-")])]),t._v(" "),a("tr",[a("td",[t._v("progressbarVisibility")]),t._v(" "),a("td",[a("code",[t._v("Boolean")])]),t._v(" "),a("td",[t._v("是否显示进度条")]),t._v(" "),a("td",[t._v("true")])]),t._v(" "),a("tr",[a("td",[t._v("scrollEnabled")]),t._v(" "),a("td",[a("code",[t._v("Boolean")])]),t._v(" "),a("td",[t._v("是否可以滚动")]),t._v(" "),a("td",[t._v("true")])]),t._v(" "),a("tr",[a("td",[t._v("enableApi")]),t._v(" "),a("td",[a("code",[t._v("Boolean")])]),t._v(" "),a("td",[t._v("开启eeui等原生交互模块,详见:注①")]),t._v(" "),a("td",[t._v("true")])]),t._v(" "),a("tr",[a("td",[t._v("userAgent")]),t._v(" "),a("td",[a("code",[t._v("String")])]),t._v(" "),a("td",[t._v("自定义浏览器userAgent(保留原有的UA)")]),t._v(" "),a("td",[t._v("-")])]),t._v(" "),a("tr",[a("td",[t._v("customUserAgent")]),t._v(" "),a("td",[a("code",[t._v("String")])]),t._v(" "),a("td",[t._v("完全自定义浏览器userAgent(去除原有的UA)")]),t._v(" "),a("td",[t._v("-")])]),t._v(" "),a("tr",[a("td",[t._v("transparency "),a("Tag",{attrs:{date:"20200213",value:"2.0.0+"}})],1),t._v(" "),a("td",[a("code",[t._v("Boolean")])]),t._v(" "),a("td",[t._v("设置背景透明(前提是网页背景也是透明,否则无效)")]),t._v(" "),a("td",[t._v("false")])])])]),t._v(" "),a("blockquote",[a("p",[t._v("例如:")])]),t._v(" "),a("div",{staticClass:"language-vue extra-class"},[a("pre",{pre:!0,attrs:{class:"language-vue"}},[a("code",[t._v("//示例1\n"),a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("<")]),t._v("web-view")]),t._v("\n "),a("span",{pre:!0,attrs:{class:"token attr-name"}},[t._v("ref")]),a("span",{pre:!0,attrs:{class:"token attr-value"}},[a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("=")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')]),t._v("reflectName"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')])]),t._v("\n "),a("span",{pre:!0,attrs:{class:"token attr-name"}},[t._v("url")]),a("span",{pre:!0,attrs:{class:"token attr-value"}},[a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("=")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')]),t._v("https://eeui.app"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')])]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(">")])]),a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("</")]),t._v("web-view")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(">")])]),t._v("\n \n//示例2\n"),a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("<")]),t._v("web-view")]),t._v("\n "),a("span",{pre:!0,attrs:{class:"token attr-name"}},[t._v("ref")]),a("span",{pre:!0,attrs:{class:"token attr-value"}},[a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("=")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')]),t._v("reflectName2"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')])]),t._v("\n "),a("span",{pre:!0,attrs:{class:"token attr-name"}},[t._v("content")]),a("span",{pre:!0,attrs:{class:"token attr-value"}},[a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("=")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')]),t._v("网页内容....."),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')])]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(">")])]),a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("</")]),t._v("web-view")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(">")])]),t._v("\n")])])]),a("blockquote",[a("p",[t._v("注①:")])]),t._v(" "),a("p",[t._v("JS支持调用的原生模块如下:(另注:插件市场内的插件是否支持web-view调用且看每个插件开发而异。)")]),t._v(" "),a("div",{staticClass:"language-js extra-class"},[a("pre",{pre:!0,attrs:{class:"language-js"}},[a("code",[a("span",{pre:!0,attrs:{class:"token comment"}},[t._v("//综合模块")]),t._v("\n"),a("span",{pre:!0,attrs:{class:"token keyword"}},[t._v("let")]),t._v(" eeui "),a("span",{pre:!0,attrs:{class:"token operator"}},[t._v("=")]),t._v(" "),a("span",{pre:!0,attrs:{class:"token function"}},[t._v("requireModuleJs")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("(")]),a("span",{pre:!0,attrs:{class:"token string"}},[t._v('"eeui"')]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(")")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(";")]),t._v("\n\n"),a("span",{pre:!0,attrs:{class:"token comment"}},[t._v("//路由模块")]),t._v("\n"),a("span",{pre:!0,attrs:{class:"token keyword"}},[t._v("let")]),t._v(" navigator "),a("span",{pre:!0,attrs:{class:"token operator"}},[t._v("=")]),t._v(" "),a("span",{pre:!0,attrs:{class:"token function"}},[t._v("requireModuleJs")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("(")]),a("span",{pre:!0,attrs:{class:"token string"}},[t._v('"navigator"')]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(")")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(";")]),t._v("\n\n"),a("span",{pre:!0,attrs:{class:"token comment"}},[t._v("//导航标题栏模块")]),t._v("\n"),a("span",{pre:!0,attrs:{class:"token keyword"}},[t._v("let")]),t._v(" navigationBar "),a("span",{pre:!0,attrs:{class:"token operator"}},[t._v("=")]),t._v(" "),a("span",{pre:!0,attrs:{class:"token function"}},[t._v("requireModuleJs")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("(")]),a("span",{pre:!0,attrs:{class:"token string"}},[t._v('"navigationBar"')]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(")")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(";")]),t._v("\n\n"),a("span",{pre:!0,attrs:{class:"token comment"}},[t._v("//浏览器调用方法")]),t._v("\n"),a("span",{pre:!0,attrs:{class:"token keyword"}},[t._v("let")]),t._v(" webview "),a("span",{pre:!0,attrs:{class:"token operator"}},[t._v("=")]),t._v(" "),a("span",{pre:!0,attrs:{class:"token function"}},[t._v("requireModuleJs")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("(")]),a("span",{pre:!0,attrs:{class:"token string"}},[t._v('"webview"')]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(")")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(";")]),t._v("\n\n"),a("span",{pre:!0,attrs:{class:"token comment"}},[t._v("//【插件市场】城市选择器")]),t._v("\n"),a("span",{pre:!0,attrs:{class:"token keyword"}},[t._v("let")]),t._v(" citypicker "),a("span",{pre:!0,attrs:{class:"token operator"}},[t._v("=")]),t._v(" "),a("span",{pre:!0,attrs:{class:"token function"}},[t._v("requireModuleJs")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("(")]),a("span",{pre:!0,attrs:{class:"token string"}},[t._v('"eeui/citypicker"')]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(")")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(";")]),t._v("\n\n"),a("span",{pre:!0,attrs:{class:"token comment"}},[t._v("//【插件市场】图片选择模块")]),t._v("\n"),a("span",{pre:!0,attrs:{class:"token keyword"}},[t._v("let")]),t._v(" picture "),a("span",{pre:!0,attrs:{class:"token operator"}},[t._v("=")]),t._v(" "),a("span",{pre:!0,attrs:{class:"token function"}},[t._v("requireModuleJs")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("(")]),a("span",{pre:!0,attrs:{class:"token string"}},[t._v('"eeui/picture"')]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(")")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(";")]),t._v("\n\n"),a("span",{pre:!0,attrs:{class:"token comment"}},[t._v("//【插件市场】支付模块")]),t._v("\n"),a("span",{pre:!0,attrs:{class:"token keyword"}},[t._v("let")]),t._v(" pay "),a("span",{pre:!0,attrs:{class:"token operator"}},[t._v("=")]),t._v(" "),a("span",{pre:!0,attrs:{class:"token function"}},[t._v("requireModuleJs")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("(")]),a("span",{pre:!0,attrs:{class:"token string"}},[t._v('"eeui/pay"')]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(")")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(";")]),t._v("\n")])])]),a("p",[t._v("例如:")]),t._v(" "),a("div",{staticClass:"language-js extra-class"},[a("pre",{pre:!0,attrs:{class:"language-js"}},[a("code",[a("span",{pre:!0,attrs:{class:"token keyword"}},[t._v("let")]),t._v(" eeui "),a("span",{pre:!0,attrs:{class:"token operator"}},[t._v("=")]),t._v(" "),a("span",{pre:!0,attrs:{class:"token function"}},[t._v("requireModuleJs")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("(")]),a("span",{pre:!0,attrs:{class:"token string"}},[t._v('"eeui"')]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(")")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(";")]),t._v("\neeui"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(".")]),a("span",{pre:!0,attrs:{class:"token function"}},[t._v("adDialog")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("(")]),a("span",{pre:!0,attrs:{class:"token string"}},[t._v('"http://..../xxx.png"')]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(",")]),t._v(" "),a("span",{pre:!0,attrs:{class:"token keyword"}},[t._v("function")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("(")]),a("span",{pre:!0,attrs:{class:"token parameter"}},[t._v("res")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(")")]),t._v(" "),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("{")]),t._v("\n eeui"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(".")]),a("span",{pre:!0,attrs:{class:"token function"}},[t._v("toast")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("(")]),a("span",{pre:!0,attrs:{class:"token string"}},[t._v('"状态:"')]),t._v(" "),a("span",{pre:!0,attrs:{class:"token operator"}},[t._v("+")]),t._v(" res"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(".")]),t._v("status"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(")")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(";")]),t._v("\n"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("}")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(")")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(";")]),t._v("\n")])])]),a("ul",[a("li",[a("code",[t._v("eeui")]),t._v(":综合模块,如:"),a("RouterLink",{attrs:{to:"/module/adDialog.html"}},[t._v("eeui.adDialog")])],1),t._v(" "),a("li",[a("code",[t._v("navigator")]),t._v(":"),a("RouterLink",{attrs:{to:"/module/navigator.html"}},[t._v("路由模块")])],1),t._v(" "),a("li",[a("code",[t._v("navigationBar")]),t._v(":"),a("RouterLink",{attrs:{to:"/navigationBar.html"}},[t._v("导航标题栏模块")])],1),t._v(" "),a("li",[a("code",[t._v("citypicker")]),t._v(":"),a("a",{attrs:{href:"../markets/eeui/citypicker"}},[t._v("城市选择器")])]),t._v(" "),a("li",[a("code",[t._v("picture")]),t._v(":"),a("a",{attrs:{href:"../markets/eeui/picture"}},[t._v("图片选择模块")])]),t._v(" "),a("li",[a("code",[t._v("pay")]),t._v(":"),a("a",{attrs:{href:"../markets/eeui/pay"}},[t._v("支付模块")])]),t._v(" "),a("li",[a("code",[t._v("webview")]),t._v(":"),a("RouterLink",{attrs:{to:"/component/web-view.html#浏览器调用方法"}},[t._v("浏览器调用方法")])],1)]),t._v(" "),a("h4",{attrs:{id:"js调用原生api示例"}},[a("a",{staticClass:"header-anchor",attrs:{href:"#js调用原生api示例","aria-hidden":"true"}},[t._v("#")]),t._v(" JS调用原生API示例")]),t._v(" "),a("div",{staticClass:"language-html extra-class"},[a("pre",{pre:!0,attrs:{class:"language-html"}},[a("code",[a("span",{pre:!0,attrs:{class:"token doctype"}},[t._v("<!DOCTYPE html>")]),t._v("\n"),a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("<")]),t._v("html")]),t._v(" "),a("span",{pre:!0,attrs:{class:"token attr-name"}},[t._v("lang")]),a("span",{pre:!0,attrs:{class:"token attr-value"}},[a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("=")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')]),t._v("en"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')])]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(">")])]),t._v("\n"),a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("<")]),t._v("head")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(">")])]),t._v("\n "),a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("<")]),t._v("meta")]),t._v(" "),a("span",{pre:!0,attrs:{class:"token attr-name"}},[t._v("charset")]),a("span",{pre:!0,attrs:{class:"token attr-value"}},[a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("=")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')]),t._v("UTF-8"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')])]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(">")])]),t._v("\n "),a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("<")]),t._v("title")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(">")])]),t._v("demo"),a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("</")]),t._v("title")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(">")])]),t._v("\n "),a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("<")]),t._v("meta")]),t._v(" "),a("span",{pre:!0,attrs:{class:"token attr-name"}},[t._v("http-equiv")]),a("span",{pre:!0,attrs:{class:"token attr-value"}},[a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("=")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')]),t._v("X-UA-Compatible"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')])]),t._v(" "),a("span",{pre:!0,attrs:{class:"token attr-name"}},[t._v("content")]),a("span",{pre:!0,attrs:{class:"token attr-value"}},[a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("=")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')]),t._v("IE=edge,chrome=1"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')])]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("/>")])]),t._v("\n "),a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("<")]),t._v("meta")]),t._v(" "),a("span",{pre:!0,attrs:{class:"token attr-name"}},[t._v("name")]),a("span",{pre:!0,attrs:{class:"token attr-value"}},[a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("=")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')]),t._v("viewport"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')])]),t._v(" "),a("span",{pre:!0,attrs:{class:"token attr-name"}},[t._v("content")]),a("span",{pre:!0,attrs:{class:"token attr-value"}},[a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("=")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')]),t._v("width=device-width, user-scalable=no, initial-scale=1.0, maximum-scale=1.0, minimum-scale=1.0"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')])]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(">")])]),t._v("\n "),a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("<")]),t._v("style")]),t._v(" "),a("span",{pre:!0,attrs:{class:"token attr-name"}},[t._v("type")]),a("span",{pre:!0,attrs:{class:"token attr-value"}},[a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("=")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')]),t._v("text/css"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')])]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(">")])]),a("span",{pre:!0,attrs:{class:"token style"}},[a("span",{pre:!0,attrs:{class:"token language-css"}},[t._v("\n "),a("span",{pre:!0,attrs:{class:"token selector"}},[t._v(".scan")]),t._v(" "),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("{")]),t._v("\n "),a("span",{pre:!0,attrs:{class:"token property"}},[t._v("font-size")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(":")]),t._v(" 14px"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(";")]),t._v("\n "),a("span",{pre:!0,attrs:{class:"token property"}},[t._v("margin")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(":")]),t._v(" 32px"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(";")]),t._v("\n "),a("span",{pre:!0,attrs:{class:"token property"}},[t._v("text-align")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(":")]),t._v(" center"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(";")]),t._v("\n "),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("}")]),t._v("\n ")])]),a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("</")]),t._v("style")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(">")])]),t._v("\n "),a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("<")]),t._v("script")]),t._v(" "),a("span",{pre:!0,attrs:{class:"token attr-name"}},[t._v("type")]),a("span",{pre:!0,attrs:{class:"token attr-value"}},[a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("=")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')]),t._v("text/javascript"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')])]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(">")])]),a("span",{pre:!0,attrs:{class:"token script"}},[a("span",{pre:!0,attrs:{class:"token language-javascript"}},[t._v("\n "),a("span",{pre:!0,attrs:{class:"token keyword"}},[t._v("function")]),t._v(" "),a("span",{pre:!0,attrs:{class:"token function"}},[t._v("openScan")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("(")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(")")]),t._v(" "),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("{")]),t._v("\n "),a("span",{pre:!0,attrs:{class:"token keyword"}},[t._v("let")]),t._v(" eeui "),a("span",{pre:!0,attrs:{class:"token operator"}},[t._v("=")]),t._v(" "),a("span",{pre:!0,attrs:{class:"token function"}},[t._v("requireModuleJs")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("(")]),a("span",{pre:!0,attrs:{class:"token string"}},[t._v('"eeui"')]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(")")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(";")]),t._v("\n eeui"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(".")]),a("span",{pre:!0,attrs:{class:"token function"}},[t._v("openScaner")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("(")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("{")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("}")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(",")]),t._v(" "),a("span",{pre:!0,attrs:{class:"token keyword"}},[t._v("function")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("(")]),a("span",{pre:!0,attrs:{class:"token parameter"}},[t._v("res")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(")")]),t._v(" "),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("{")]),t._v("\n "),a("span",{pre:!0,attrs:{class:"token keyword"}},[t._v("switch")]),t._v(" "),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("(")]),t._v("res"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(".")]),t._v("status"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(")")]),t._v(" "),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("{")]),t._v("\n "),a("span",{pre:!0,attrs:{class:"token keyword"}},[t._v("case")]),t._v(" "),a("span",{pre:!0,attrs:{class:"token string"}},[t._v('"success"')]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(":")]),t._v("\n eeui"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(".")]),a("span",{pre:!0,attrs:{class:"token function"}},[t._v("toast")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("(")]),a("span",{pre:!0,attrs:{class:"token string"}},[t._v('"识别成功:"')]),t._v(" "),a("span",{pre:!0,attrs:{class:"token operator"}},[t._v("+")]),t._v(" res"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(".")]),t._v("text"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(")")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(";")]),t._v("\n "),a("span",{pre:!0,attrs:{class:"token keyword"}},[t._v("break")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(";")]),t._v("\n\n "),a("span",{pre:!0,attrs:{class:"token keyword"}},[t._v("case")]),t._v(" "),a("span",{pre:!0,attrs:{class:"token string"}},[t._v('"failed"')]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(":")]),t._v("\n eeui"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(".")]),a("span",{pre:!0,attrs:{class:"token function"}},[t._v("toast")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("(")]),a("span",{pre:!0,attrs:{class:"token string"}},[t._v('"识别失败"')]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(")")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(";")]),t._v("\n "),a("span",{pre:!0,attrs:{class:"token keyword"}},[t._v("break")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(";")]),t._v("\n "),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("}")]),t._v("\n "),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("}")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(")")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(";")]),t._v("\n "),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("}")]),t._v("\n "),a("span",{pre:!0,attrs:{class:"token keyword"}},[t._v("function")]),t._v(" "),a("span",{pre:!0,attrs:{class:"token function"}},[t._v("$ready")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("(")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(")")]),t._v(" "),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("{")]),t._v("\n "),a("span",{pre:!0,attrs:{class:"token comment"}},[t._v("//网页加载完成时组件会自动执行此方法")]),t._v("\n "),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("}")]),t._v("\n ")])]),a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("</")]),t._v("script")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(">")])]),t._v("\n"),a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("</")]),t._v("head")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(">")])]),t._v("\n"),a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("<")]),t._v("body")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(">")])]),t._v("\n "),a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("<")]),t._v("div")]),t._v(" "),a("span",{pre:!0,attrs:{class:"token attr-name"}},[t._v("class")]),a("span",{pre:!0,attrs:{class:"token attr-value"}},[a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("=")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')]),t._v("scan"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')])]),t._v(" "),a("span",{pre:!0,attrs:{class:"token attr-name"}},[t._v("onclick")]),a("span",{pre:!0,attrs:{class:"token attr-value"}},[a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("=")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')]),t._v("openScan()"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v('"')])]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(">")])]),t._v("打开扫一扫"),a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("</")]),t._v("div")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(">")])]),t._v("\n"),a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("</")]),t._v("body")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(">")])]),t._v("\n"),a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token tag"}},[a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("</")]),t._v("html")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(">")])]),t._v("\n")])])]),a("h2",{attrs:{id:"事件回调-callback"}},[a("a",{staticClass:"header-anchor",attrs:{href:"#事件回调-callback","aria-hidden":"true"}},[t._v("#")]),t._v(" 事件回调 "),a("code",[t._v("callback")])]),t._v(" "),a("div",{staticClass:"language-js extra-class"},[a("pre",{pre:!0,attrs:{class:"language-js"}},[a("code",[a("span",{pre:!0,attrs:{class:"token comment"}},[t._v("/**\n * 组件加载完成\n */")]),t._v("\n@"),a("span",{pre:!0,attrs:{class:"token function-variable function"}},[t._v("ready")]),t._v(" "),a("span",{pre:!0,attrs:{class:"token operator"}},[t._v("=")]),t._v(" "),a("span",{pre:!0,attrs:{class:"token keyword"}},[t._v("function")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("(")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(")")]),t._v(" "),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("{")]),t._v(" "),a("span",{pre:!0,attrs:{class:"token operator"}},[t._v("...")]),t._v(" "),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("}")]),t._v("\n\n"),a("span",{pre:!0,attrs:{class:"token comment"}},[t._v("/**\n * 监听状态发生改变\n * 返回参数:data = {\n status:'success', //状态,注②\n \n title: '网页标题', //仅【status=title】存在\n url: 'http://....', //仅【status=url】存在\n \n errCode: '', //仅【status=error】存在\n errMsg: '', //仅【status=error】存在\n errUrl: '', //仅【status=error】存在\n }\n */")]),t._v("\n@"),a("span",{pre:!0,attrs:{class:"token function-variable function"}},[t._v("stateChanged")]),t._v(" "),a("span",{pre:!0,attrs:{class:"token operator"}},[t._v("=")]),t._v(" "),a("span",{pre:!0,attrs:{class:"token keyword"}},[t._v("function")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("(")]),a("span",{pre:!0,attrs:{class:"token parameter"}},[t._v("data")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(")")]),t._v(" "),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("{")]),t._v(" "),a("span",{pre:!0,attrs:{class:"token operator"}},[t._v("...")]),t._v(" "),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("}")]),t._v("\n\n"),a("span",{pre:!0,attrs:{class:"token comment"}},[t._v("/**\n * 监听网页高度发生改变\n * 返回参数:data = {\n height:100, //变化的高度\n }\n */")]),t._v("\n@"),a("span",{pre:!0,attrs:{class:"token function-variable function"}},[t._v("heightChanged")]),t._v(" "),a("span",{pre:!0,attrs:{class:"token operator"}},[t._v("=")]),t._v(" "),a("span",{pre:!0,attrs:{class:"token keyword"}},[t._v("function")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("(")]),a("span",{pre:!0,attrs:{class:"token parameter"}},[t._v("data")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(")")]),t._v(" "),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("{")]),t._v(" "),a("span",{pre:!0,attrs:{class:"token operator"}},[t._v("...")]),t._v(" "),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("}")]),t._v("\n\n"),a("span",{pre:!0,attrs:{class:"token comment"}},[t._v("/**\n * 监听网页向组件发送消息\n * 返回参数:data = {\n message:123456, //详细消息\n }\n */")]),t._v("\n@"),a("span",{pre:!0,attrs:{class:"token function-variable function"}},[t._v("receiveMessage")]),t._v(" "),a("span",{pre:!0,attrs:{class:"token operator"}},[t._v("=")]),t._v(" "),a("span",{pre:!0,attrs:{class:"token keyword"}},[t._v("function")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("(")]),a("span",{pre:!0,attrs:{class:"token parameter"}},[t._v("data")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(")")]),t._v(" "),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("{")]),t._v(" "),a("span",{pre:!0,attrs:{class:"token operator"}},[t._v("...")]),t._v(" "),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("}")]),t._v("\n")])])]),a("blockquote",[a("p",[t._v("注②:")])]),t._v(" "),a("ul",[a("li",[a("code",[t._v("start")]),t._v("开始加载")]),t._v(" "),a("li",[a("code",[t._v("success")]),t._v("加载完毕")]),t._v(" "),a("li",[a("code",[t._v("error")]),t._v("加载错误")]),t._v(" "),a("li",[a("code",[t._v("title")]),t._v("标题发生改变")]),t._v(" "),a("li",[a("code",[t._v("url")]),t._v("网页地址发生改变")])]),t._v(" "),a("h2",{attrs:{id:"调用方法-methods"}},[a("a",{staticClass:"header-anchor",attrs:{href:"#调用方法-methods","aria-hidden":"true"}},[t._v("#")]),t._v(" 调用方法 "),a("code",[t._v("methods")])]),t._v(" "),a("div",{staticClass:"language-js extra-class"},[a("pre",{pre:!0,attrs:{class:"language-js"}},[a("code",[a("span",{pre:!0,attrs:{class:"token comment"}},[t._v("/**\n * 设置浏览器内容\n * 参数一:详细内容\n */")]),t._v("\n"),a("span",{pre:!0,attrs:{class:"token keyword"}},[t._v("this")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(".")]),t._v("$refs"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(".")]),t._v("reflectName"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(".")]),a("span",{pre:!0,attrs:{class:"token function"}},[t._v("setContent")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("(")]),t._v("string"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(")")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(";")]),t._v("\n\n"),a("span",{pre:!0,attrs:{class:"token comment"}},[t._v("/**\n * 设置浏览器地址\n * 参数一:地址,如:https://eeui.app\n */")]),t._v("\n"),a("span",{pre:!0,attrs:{class:"token keyword"}},[t._v("this")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(".")]),t._v("$refs"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(".")]),t._v("reflectName"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(".")]),a("span",{pre:!0,attrs:{class:"token function"}},[t._v("setUrl")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("(")]),t._v("url"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(")")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(";")]),t._v("\n\n"),a("span",{pre:!0,attrs:{class:"token comment"}},[t._v("/**\n * 向浏览器内发送js代码\n * 参数一:js代码,如:alert('demo');\n */")]),t._v("\n"),a("span",{pre:!0,attrs:{class:"token keyword"}},[t._v("this")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(".")]),t._v("$refs"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(".")]),t._v("reflectName"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(".")]),a("span",{pre:!0,attrs:{class:"token function"}},[t._v("setJavaScript")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("(")]),t._v("javascript"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(")")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(";")]),t._v("\n\n"),a("span",{pre:!0,attrs:{class:"token comment"}},[t._v("/**\n * 是否可以后退\n * \n * 回调 result: true|false\n */")]),t._v("\n"),a("span",{pre:!0,attrs:{class:"token keyword"}},[t._v("this")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(".")]),t._v("$refs"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(".")]),t._v("reflectName"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(".")]),a("span",{pre:!0,attrs:{class:"token function"}},[t._v("canGoBack")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("(")]),a("span",{pre:!0,attrs:{class:"token function"}},[t._v("callback")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("(")]),t._v("result"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(")")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(")")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(";")]),t._v("\n\n"),a("span",{pre:!0,attrs:{class:"token comment"}},[t._v("/**\n * 后退并返回是否后退成功\n * \n * 回调 result: true|false\n */")]),t._v("\n"),a("span",{pre:!0,attrs:{class:"token keyword"}},[t._v("this")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(".")]),t._v("$refs"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(".")]),t._v("reflectName"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(".")]),a("span",{pre:!0,attrs:{class:"token function"}},[t._v("goBack")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("(")]),a("span",{pre:!0,attrs:{class:"token function"}},[t._v("callback")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("(")]),t._v("result"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(")")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(")")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(";")]),t._v("\n\n"),a("span",{pre:!0,attrs:{class:"token comment"}},[t._v("/**\n * 是否可以前进\n * \n * 回调 result: true|false\n */")]),t._v("\n"),a("span",{pre:!0,attrs:{class:"token keyword"}},[t._v("this")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(".")]),t._v("$refs"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(".")]),t._v("reflectName"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(".")]),a("span",{pre:!0,attrs:{class:"token function"}},[t._v("canGoForward")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("(")]),a("span",{pre:!0,attrs:{class:"token function"}},[t._v("callback")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("(")]),t._v("result"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(")")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(")")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(";")]),t._v("\n\n"),a("span",{pre:!0,attrs:{class:"token comment"}},[t._v("/**\n * 前进并返回是否前进成功\n * \n * 回调 result: true|false\n */")]),t._v("\n"),a("span",{pre:!0,attrs:{class:"token keyword"}},[t._v("this")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(".")]),t._v("$refs"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(".")]),t._v("reflectName"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(".")]),a("span",{pre:!0,attrs:{class:"token function"}},[t._v("goForward")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("(")]),a("span",{pre:!0,attrs:{class:"token function"}},[t._v("callback")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("(")]),t._v("result"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(")")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(")")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(";")]),t._v("\n\n"),a("span",{pre:!0,attrs:{class:"token comment"}},[t._v("/**\n * 设置是否显示进度条\n * 参数一:true|false\n */")]),t._v("\n"),a("span",{pre:!0,attrs:{class:"token keyword"}},[t._v("this")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(".")]),t._v("$refs"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(".")]),t._v("reflectName"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(".")]),a("span",{pre:!0,attrs:{class:"token function"}},[t._v("setProgressbarVisibility")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("(")]),a("span",{pre:!0,attrs:{class:"token boolean"}},[t._v("true")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(")")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(";")]),t._v("\n\n"),a("span",{pre:!0,attrs:{class:"token comment"}},[t._v("/**\n * 设置是否可以滚动\n * 参数一:true|false\n */")]),t._v("\n"),a("span",{pre:!0,attrs:{class:"token keyword"}},[t._v("this")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(".")]),t._v("$refs"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(".")]),t._v("reflectName"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(".")]),a("span",{pre:!0,attrs:{class:"token function"}},[t._v("setScrollEnabled")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("(")]),a("span",{pre:!0,attrs:{class:"token boolean"}},[t._v("true")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(")")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(";")]),t._v("\n\n")])])]),a("h2",{attrs:{id:"浏览器内js调用原生api方法"}},[a("a",{staticClass:"header-anchor",attrs:{href:"#浏览器内js调用原生api方法","aria-hidden":"true"}},[t._v("#")]),t._v(" 浏览器内js调用原生api方法")]),t._v(" "),a("div",{staticClass:"language-js extra-class"},[a("pre",{pre:!0,attrs:{class:"language-js"}},[a("code",[a("span",{pre:!0,attrs:{class:"token keyword"}},[t._v("let")]),t._v(" webview "),a("span",{pre:!0,attrs:{class:"token operator"}},[t._v("=")]),t._v(" "),a("span",{pre:!0,attrs:{class:"token function"}},[t._v("requireModuleJs")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("(")]),a("span",{pre:!0,attrs:{class:"token string"}},[t._v('"webview"')]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(")")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(";")]),t._v("\n\n"),a("span",{pre:!0,attrs:{class:"token comment"}},[t._v("/**\n * 设置浏览器内容\n * 参数一:详细内容\n */")]),t._v("\nwebview"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(".")]),a("span",{pre:!0,attrs:{class:"token function"}},[t._v("setContent")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("(")]),t._v("string"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(")")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(";")]),t._v("\n\n"),a("span",{pre:!0,attrs:{class:"token comment"}},[t._v("/**\n * 设置浏览器地址\n * 参数一:地址,如:https://eeui.app\n */")]),t._v("\nwebview"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(".")]),a("span",{pre:!0,attrs:{class:"token function"}},[t._v("setUrl")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("(")]),t._v("url"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(")")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(";")]),t._v("\n\n"),a("span",{pre:!0,attrs:{class:"token comment"}},[t._v("/**\n * 向组件发送信息,组件通过receiveMessage事件获取信息\n * 参数一:要发送的数据,如:123456\n */")]),t._v("\nwebview"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(".")]),a("span",{pre:!0,attrs:{class:"token function"}},[t._v("sendMessage")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("(")]),t._v("url"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(")")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(";")]),t._v("\n\n"),a("span",{pre:!0,attrs:{class:"token comment"}},[t._v("/**\n * 是否可以后退\n * \n * 回调 result: true|false\n */")]),t._v("\nwebview"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(".")]),a("span",{pre:!0,attrs:{class:"token function"}},[t._v("canGoBack")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("(")]),a("span",{pre:!0,attrs:{class:"token function"}},[t._v("callback")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("(")]),t._v("result"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(")")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(")")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(";")]),t._v("\n\n"),a("span",{pre:!0,attrs:{class:"token comment"}},[t._v("/**\n * 后退并返回是否后退成功\n * \n * 回调 result: true|false\n */")]),t._v("\nwebview"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(".")]),a("span",{pre:!0,attrs:{class:"token function"}},[t._v("goBack")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("(")]),a("span",{pre:!0,attrs:{class:"token function"}},[t._v("callback")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("(")]),t._v("result"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(")")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(")")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(";")]),t._v("\n\n"),a("span",{pre:!0,attrs:{class:"token comment"}},[t._v("/**\n * 是否可以前进\n * \n * 回调 result: true|false\n */")]),t._v("\nwebview"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(".")]),a("span",{pre:!0,attrs:{class:"token function"}},[t._v("canGoForward")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("(")]),a("span",{pre:!0,attrs:{class:"token function"}},[t._v("callback")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("(")]),t._v("result"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(")")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(")")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(";")]),t._v("\n\n"),a("span",{pre:!0,attrs:{class:"token comment"}},[t._v("/**\n * 前进并返回是否前进成功\n * \n * 回调 result: true|false\n */")]),t._v("\nwebview"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(".")]),a("span",{pre:!0,attrs:{class:"token function"}},[t._v("goForward")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("(")]),a("span",{pre:!0,attrs:{class:"token function"}},[t._v("callback")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("(")]),t._v("result"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(")")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(")")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(";")]),t._v("\n\n"),a("span",{pre:!0,attrs:{class:"token comment"}},[t._v("/**\n * 设置是否显示进度条\n * 参数一:true|false\n */")]),t._v("\nwebview"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(".")]),a("span",{pre:!0,attrs:{class:"token function"}},[t._v("setProgressbarVisibility")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("(")]),a("span",{pre:!0,attrs:{class:"token boolean"}},[t._v("true")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(")")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(";")]),t._v("\n\n"),a("span",{pre:!0,attrs:{class:"token comment"}},[t._v("/**\n * 设置是否可以滚动\n * 参数一:true|false\n */")]),t._v("\nwebview"),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(".")]),a("span",{pre:!0,attrs:{class:"token function"}},[t._v("setScrollEnabled")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v("(")]),a("span",{pre:!0,attrs:{class:"token boolean"}},[t._v("true")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(")")]),a("span",{pre:!0,attrs:{class:"token punctuation"}},[t._v(";")]),t._v("\n\n")])])])])},[],!1,null,null,null);s.default=e.exports}}]);
| ||
layers.py
|
from tensorflow import keras
import tensorflow as tf
class BatchNormalization(keras.layers.BatchNormalization):
"""
Identical to keras.layers.BatchNormalization, but adds the option to freeze parameters.
"""
def __init__(self, freeze, *args, **kwargs):
self.freeze = freeze
super(BatchNormalization, self).__init__(*args, **kwargs)
# set to non-trainable if freeze is true
self.trainable = not self.freeze
def call(self, inputs, training=None, **kwargs):
# return super.call, but set training
if not training:
return super(BatchNormalization, self).call(inputs, training=False)
else:
return super(BatchNormalization, self).call(inputs, training=(not self.freeze))
def get_config(self):
config = super(BatchNormalization, self).get_config()
config.update({'freeze': self.freeze})
return config
class
|
(keras.layers.Layer):
def __init__(self, epsilon=1e-4, **kwargs):
super(wBiFPNAdd, self).__init__(**kwargs)
self.epsilon = epsilon
def build(self, input_shape):
num_in = len(input_shape)
self.w = self.add_weight(name=self.name,
shape=(num_in,),
initializer=keras.initializers.constant(1 / num_in),
trainable=True,
dtype=tf.float32)
def call(self, inputs, **kwargs):
w = keras.activations.relu(self.w)
x = tf.reduce_sum([w[i] * inputs[i] for i in range(len(inputs))], axis=0)
x = x / (tf.reduce_sum(w) + self.epsilon)
return x
def compute_output_shape(self, input_shape):
return input_shape[0]
def get_config(self):
config = super(wBiFPNAdd, self).get_config()
config.update({
'epsilon': self.epsilon
})
return config
|
wBiFPNAdd
|
error.rs
|
use agsol_common::SignerPdaError;
#[cfg(feature = "test-bpf")]
use num_derive::FromPrimitive;
use solana_program::program_error::ProgramError;
#[derive(Debug, PartialEq)]
#[cfg_attr(feature = "test-bpf", derive(FromPrimitive))]
pub enum AuctionContractError {
InvalidInstruction = 500, // 1f4
AuctionCycleEnded = 501, // 1f5
AuctionFrozen = 502, // 1f6
AuctionAlreadyInitialized = 503, // 1f7
ContractAlreadyInitialized = 504, // 1f8
AuctionIsInProgress = 505, // 1f9
InvalidSeeds = 506, // 1fa
InvalidBidAmount = 507, // 1fb
AuctionOwnerMismatch = 508, // 1fc
InvalidStartTime = 509, // 1fd
TopBidderAccountMismatch = 510, // 1fe
MasterEditionMismatch = 511, // 1ff
ChildEditionNumberMismatch = 512, // 200
NftAlreadyExists = 513, // 201
InvalidClaimAmount = 514, // 202
AuctionEnded = 515, // 203
AuctionIdNotUnique = 516, // 204
ContractAdminMismatch = 517, // 205
AuctionIsActive = 518, // 206
MetadataManipulationError = 519, // 207
InvalidProgramAddress = 520, // 208
InvalidAccountOwner = 521, // 209
ArithmeticError = 522, // 20a
WithdrawAuthorityMismatch = 523, // 20b
AuctionPoolFull = 524, // 20c
ShrinkingPoolIsNotAllowed = 525, // 20d
InvalidMinimumBidAmount = 526, // 20e
InvalidPerCycleAmount = 527, // 20f
InvalidCyclePeriod = 528, // 210
AuctionIdNotAscii = 529, // 211
}
impl From<AuctionContractError> for ProgramError {
fn from(e: AuctionContractError) -> Self {
ProgramError::Custom(e as u32)
}
}
|
impl From<SignerPdaError> for AuctionContractError {
fn from(_: SignerPdaError) -> Self {
AuctionContractError::InvalidSeeds
}
}
| |
main.rs
|
// DO NOT EDIT !
// This file was generated automatically from 'src/mako/cli/main.rs.mako'
// DO NOT EDIT !
#![allow(unused_variables, unused_imports, dead_code, unused_mut)]
#[macro_use]
extern crate clap;
extern crate yup_oauth2 as oauth2;
extern crate yup_hyper_mock as mock;
extern crate hyper_rustls;
extern crate serde;
extern crate serde_json;
extern crate hyper;
extern crate mime;
extern crate strsim;
extern crate google_sql1_beta4 as api;
use std::env;
use std::io::{self, Write};
use clap::{App, SubCommand, Arg};
mod cmn;
use cmn::{InvalidOptionsError, CLIError, JsonTokenStorage, arg_from_str, writer_from_opts, parse_kv_arg,
input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol,
calltype_from_str, remove_json_null_values, ComplexType, JsonType, JsonTypeInfo};
use std::default::Default;
use std::str::FromStr;
use oauth2::{Authenticator, DefaultAuthenticatorDelegate, FlowType};
use serde_json as json;
use clap::ArgMatches;
enum DoitError {
IoError(String, io::Error),
ApiError(api::Error),
}
struct Engine<'n> {
opt: ArgMatches<'n>,
hub: api::SQLAdmin<hyper::Client, Authenticator<DefaultAuthenticatorDelegate, JsonTokenStorage, hyper::Client>>,
gp: Vec<&'static str>,
gpm: Vec<(&'static str, &'static str)>,
}
impl<'n> Engine<'n> {
fn _backup_runs_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError>
|
fn _backup_runs_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut call = self.hub.backup_runs().get(opt.value_of("project").unwrap_or(""), opt.value_of("instance").unwrap_or(""), opt.value_of("id").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit(),
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
fn _backup_runs_insert(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut field_cursor = FieldCursor::default();
let mut object = json::value::Value::Object(Default::default());
for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let last_errc = err.issues.len();
let (key, value) = parse_kv_arg(&*kvarg, err, false);
let mut temp_cursor = field_cursor.clone();
if let Err(field_err) = temp_cursor.set(&*key) {
err.issues.push(field_err);
}
if value.is_none() {
field_cursor = temp_cursor.clone();
if err.issues.len() > last_errc {
err.issues.remove(last_errc);
}
continue;
}
let type_info: Option<(&'static str, JsonTypeInfo)> =
match &temp_cursor.to_string()[..] {
"status" => Some(("status", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"disk-encryption-configuration.kind" => Some(("diskEncryptionConfiguration.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"disk-encryption-configuration.kms-key-name" => Some(("diskEncryptionConfiguration.kmsKeyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"description" => Some(("description", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"window-start-time" => Some(("windowStartTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"disk-encryption-status.kms-key-version-name" => Some(("diskEncryptionStatus.kmsKeyVersionName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"disk-encryption-status.kind" => Some(("diskEncryptionStatus.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"id" => Some(("id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"instance" => Some(("instance", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"location" => Some(("location", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"start-time" => Some(("startTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"error.kind" => Some(("error.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"error.code" => Some(("error.code", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"error.message" => Some(("error.message", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"end-time" => Some(("endTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"type" => Some(("type", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"enqueued-time" => Some(("enqueuedTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
_ => {
let suggestion = FieldCursor::did_you_mean(key, &vec!["code", "description", "disk-encryption-configuration", "disk-encryption-status", "end-time", "enqueued-time", "error", "id", "instance", "kind", "kms-key-name", "kms-key-version-name", "location", "message", "self-link", "start-time", "status", "type", "window-start-time"]);
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string()))));
None
}
};
if let Some((field_cursor_str, type_info)) = type_info {
FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor);
}
}
let mut request: api::BackupRun = json::value::from_value(object).unwrap();
let mut call = self.hub.backup_runs().insert(request, opt.value_of("project").unwrap_or(""), opt.value_of("instance").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit(),
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
fn _backup_runs_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut call = self.hub.backup_runs().list(opt.value_of("project").unwrap_or(""), opt.value_of("instance").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
"page-token" => {
call = call.page_token(value.unwrap_or(""));
},
"max-results" => {
call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer"));
},
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v.extend(["page-token", "max-results"].iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit(),
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
fn _databases_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut call = self.hub.databases().delete(opt.value_of("project").unwrap_or(""), opt.value_of("instance").unwrap_or(""), opt.value_of("database").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit(),
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
fn _databases_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut call = self.hub.databases().get(opt.value_of("project").unwrap_or(""), opt.value_of("instance").unwrap_or(""), opt.value_of("database").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit(),
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
fn _databases_insert(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut field_cursor = FieldCursor::default();
let mut object = json::value::Value::Object(Default::default());
for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let last_errc = err.issues.len();
let (key, value) = parse_kv_arg(&*kvarg, err, false);
let mut temp_cursor = field_cursor.clone();
if let Err(field_err) = temp_cursor.set(&*key) {
err.issues.push(field_err);
}
if value.is_none() {
field_cursor = temp_cursor.clone();
if err.issues.len() > last_errc {
err.issues.remove(last_errc);
}
continue;
}
let type_info: Option<(&'static str, JsonTypeInfo)> =
match &temp_cursor.to_string()[..] {
"sqlserver-database-details.compatibility-level" => Some(("sqlserverDatabaseDetails.compatibilityLevel", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })),
"sqlserver-database-details.recovery-model" => Some(("sqlserverDatabaseDetails.recoveryModel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"charset" => Some(("charset", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"project" => Some(("project", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"instance" => Some(("instance", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"collation" => Some(("collation", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
_ => {
let suggestion = FieldCursor::did_you_mean(key, &vec!["charset", "collation", "compatibility-level", "etag", "instance", "kind", "name", "project", "recovery-model", "self-link", "sqlserver-database-details"]);
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string()))));
None
}
};
if let Some((field_cursor_str, type_info)) = type_info {
FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor);
}
}
let mut request: api::Database = json::value::from_value(object).unwrap();
let mut call = self.hub.databases().insert(request, opt.value_of("project").unwrap_or(""), opt.value_of("instance").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit(),
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
fn _databases_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut call = self.hub.databases().list(opt.value_of("project").unwrap_or(""), opt.value_of("instance").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit(),
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
fn _databases_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut field_cursor = FieldCursor::default();
let mut object = json::value::Value::Object(Default::default());
for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let last_errc = err.issues.len();
let (key, value) = parse_kv_arg(&*kvarg, err, false);
let mut temp_cursor = field_cursor.clone();
if let Err(field_err) = temp_cursor.set(&*key) {
err.issues.push(field_err);
}
if value.is_none() {
field_cursor = temp_cursor.clone();
if err.issues.len() > last_errc {
err.issues.remove(last_errc);
}
continue;
}
let type_info: Option<(&'static str, JsonTypeInfo)> =
match &temp_cursor.to_string()[..] {
"sqlserver-database-details.compatibility-level" => Some(("sqlserverDatabaseDetails.compatibilityLevel", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })),
"sqlserver-database-details.recovery-model" => Some(("sqlserverDatabaseDetails.recoveryModel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"charset" => Some(("charset", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"project" => Some(("project", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"instance" => Some(("instance", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"collation" => Some(("collation", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
_ => {
let suggestion = FieldCursor::did_you_mean(key, &vec!["charset", "collation", "compatibility-level", "etag", "instance", "kind", "name", "project", "recovery-model", "self-link", "sqlserver-database-details"]);
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string()))));
None
}
};
if let Some((field_cursor_str, type_info)) = type_info {
FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor);
}
}
let mut request: api::Database = json::value::from_value(object).unwrap();
let mut call = self.hub.databases().patch(request, opt.value_of("project").unwrap_or(""), opt.value_of("instance").unwrap_or(""), opt.value_of("database").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit(),
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
fn _databases_update(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut field_cursor = FieldCursor::default();
let mut object = json::value::Value::Object(Default::default());
for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let last_errc = err.issues.len();
let (key, value) = parse_kv_arg(&*kvarg, err, false);
let mut temp_cursor = field_cursor.clone();
if let Err(field_err) = temp_cursor.set(&*key) {
err.issues.push(field_err);
}
if value.is_none() {
field_cursor = temp_cursor.clone();
if err.issues.len() > last_errc {
err.issues.remove(last_errc);
}
continue;
}
let type_info: Option<(&'static str, JsonTypeInfo)> =
match &temp_cursor.to_string()[..] {
"sqlserver-database-details.compatibility-level" => Some(("sqlserverDatabaseDetails.compatibilityLevel", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })),
"sqlserver-database-details.recovery-model" => Some(("sqlserverDatabaseDetails.recoveryModel", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"charset" => Some(("charset", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"project" => Some(("project", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"instance" => Some(("instance", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"collation" => Some(("collation", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
_ => {
let suggestion = FieldCursor::did_you_mean(key, &vec!["charset", "collation", "compatibility-level", "etag", "instance", "kind", "name", "project", "recovery-model", "self-link", "sqlserver-database-details"]);
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string()))));
None
}
};
if let Some((field_cursor_str, type_info)) = type_info {
FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor);
}
}
let mut request: api::Database = json::value::from_value(object).unwrap();
let mut call = self.hub.databases().update(request, opt.value_of("project").unwrap_or(""), opt.value_of("instance").unwrap_or(""), opt.value_of("database").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit(),
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
fn _flags_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut call = self.hub.flags().list();
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
"database-version" => {
call = call.database_version(value.unwrap_or(""));
},
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v.extend(["database-version"].iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit(),
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
fn _instances_add_server_ca(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut call = self.hub.instances().add_server_ca(opt.value_of("project").unwrap_or(""), opt.value_of("instance").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit(),
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
fn _instances_clone(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut field_cursor = FieldCursor::default();
let mut object = json::value::Value::Object(Default::default());
for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let last_errc = err.issues.len();
let (key, value) = parse_kv_arg(&*kvarg, err, false);
let mut temp_cursor = field_cursor.clone();
if let Err(field_err) = temp_cursor.set(&*key) {
err.issues.push(field_err);
}
if value.is_none() {
field_cursor = temp_cursor.clone();
if err.issues.len() > last_errc {
err.issues.remove(last_errc);
}
continue;
}
let type_info: Option<(&'static str, JsonTypeInfo)> =
match &temp_cursor.to_string()[..] {
"clone-context.bin-log-coordinates.bin-log-position" => Some(("cloneContext.binLogCoordinates.binLogPosition", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"clone-context.bin-log-coordinates.kind" => Some(("cloneContext.binLogCoordinates.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"clone-context.bin-log-coordinates.bin-log-file-name" => Some(("cloneContext.binLogCoordinates.binLogFileName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"clone-context.kind" => Some(("cloneContext.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"clone-context.point-in-time" => Some(("cloneContext.pointInTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"clone-context.pitr-timestamp-ms" => Some(("cloneContext.pitrTimestampMs", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"clone-context.destination-instance-name" => Some(("cloneContext.destinationInstanceName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
_ => {
let suggestion = FieldCursor::did_you_mean(key, &vec!["bin-log-coordinates", "bin-log-file-name", "bin-log-position", "clone-context", "destination-instance-name", "kind", "pitr-timestamp-ms", "point-in-time"]);
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string()))));
None
}
};
if let Some((field_cursor_str, type_info)) = type_info {
FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor);
}
}
let mut request: api::InstancesCloneRequest = json::value::from_value(object).unwrap();
let mut call = self.hub.instances().clone(request, opt.value_of("project").unwrap_or(""), opt.value_of("instance").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit(),
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
fn _instances_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut call = self.hub.instances().delete(opt.value_of("project").unwrap_or(""), opt.value_of("instance").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit(),
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
fn _instances_demote_master(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut field_cursor = FieldCursor::default();
let mut object = json::value::Value::Object(Default::default());
for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let last_errc = err.issues.len();
let (key, value) = parse_kv_arg(&*kvarg, err, false);
let mut temp_cursor = field_cursor.clone();
if let Err(field_err) = temp_cursor.set(&*key) {
err.issues.push(field_err);
}
if value.is_none() {
field_cursor = temp_cursor.clone();
if err.issues.len() > last_errc {
err.issues.remove(last_errc);
}
continue;
}
let type_info: Option<(&'static str, JsonTypeInfo)> =
match &temp_cursor.to_string()[..] {
"demote-master-context.kind" => Some(("demoteMasterContext.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"demote-master-context.master-instance-name" => Some(("demoteMasterContext.masterInstanceName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"demote-master-context.verify-gtid-consistency" => Some(("demoteMasterContext.verifyGtidConsistency", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })),
"demote-master-context.replica-configuration.kind" => Some(("demoteMasterContext.replicaConfiguration.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"demote-master-context.replica-configuration.mysql-replica-configuration.username" => Some(("demoteMasterContext.replicaConfiguration.mysqlReplicaConfiguration.username", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"demote-master-context.replica-configuration.mysql-replica-configuration.kind" => Some(("demoteMasterContext.replicaConfiguration.mysqlReplicaConfiguration.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"demote-master-context.replica-configuration.mysql-replica-configuration.client-key" => Some(("demoteMasterContext.replicaConfiguration.mysqlReplicaConfiguration.clientKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"demote-master-context.replica-configuration.mysql-replica-configuration.ca-certificate" => Some(("demoteMasterContext.replicaConfiguration.mysqlReplicaConfiguration.caCertificate", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"demote-master-context.replica-configuration.mysql-replica-configuration.client-certificate" => Some(("demoteMasterContext.replicaConfiguration.mysqlReplicaConfiguration.clientCertificate", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"demote-master-context.replica-configuration.mysql-replica-configuration.password" => Some(("demoteMasterContext.replicaConfiguration.mysqlReplicaConfiguration.password", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
_ => {
let suggestion = FieldCursor::did_you_mean(key, &vec!["ca-certificate", "client-certificate", "client-key", "demote-master-context", "kind", "master-instance-name", "mysql-replica-configuration", "password", "replica-configuration", "username", "verify-gtid-consistency"]);
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string()))));
None
}
};
if let Some((field_cursor_str, type_info)) = type_info {
FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor);
}
}
let mut request: api::InstancesDemoteMasterRequest = json::value::from_value(object).unwrap();
let mut call = self.hub.instances().demote_master(request, opt.value_of("project").unwrap_or(""), opt.value_of("instance").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit(),
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
fn _instances_export(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut field_cursor = FieldCursor::default();
let mut object = json::value::Value::Object(Default::default());
for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let last_errc = err.issues.len();
let (key, value) = parse_kv_arg(&*kvarg, err, false);
let mut temp_cursor = field_cursor.clone();
if let Err(field_err) = temp_cursor.set(&*key) {
err.issues.push(field_err);
}
if value.is_none() {
field_cursor = temp_cursor.clone();
if err.issues.len() > last_errc {
err.issues.remove(last_errc);
}
continue;
}
let type_info: Option<(&'static str, JsonTypeInfo)> =
match &temp_cursor.to_string()[..] {
"export-context.kind" => Some(("exportContext.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"export-context.file-type" => Some(("exportContext.fileType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"export-context.uri" => Some(("exportContext.uri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"export-context.csv-export-options.select-query" => Some(("exportContext.csvExportOptions.selectQuery", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"export-context.databases" => Some(("exportContext.databases", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })),
"export-context.sql-export-options.tables" => Some(("exportContext.sqlExportOptions.tables", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })),
"export-context.sql-export-options.mysql-export-options.master-data" => Some(("exportContext.sqlExportOptions.mysqlExportOptions.masterData", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })),
"export-context.sql-export-options.schema-only" => Some(("exportContext.sqlExportOptions.schemaOnly", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })),
_ => {
let suggestion = FieldCursor::did_you_mean(key, &vec!["csv-export-options", "databases", "export-context", "file-type", "kind", "master-data", "mysql-export-options", "schema-only", "select-query", "sql-export-options", "tables", "uri"]);
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string()))));
None
}
};
if let Some((field_cursor_str, type_info)) = type_info {
FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor);
}
}
let mut request: api::InstancesExportRequest = json::value::from_value(object).unwrap();
let mut call = self.hub.instances().export(request, opt.value_of("project").unwrap_or(""), opt.value_of("instance").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit(),
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
fn _instances_failover(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut field_cursor = FieldCursor::default();
let mut object = json::value::Value::Object(Default::default());
for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let last_errc = err.issues.len();
let (key, value) = parse_kv_arg(&*kvarg, err, false);
let mut temp_cursor = field_cursor.clone();
if let Err(field_err) = temp_cursor.set(&*key) {
err.issues.push(field_err);
}
if value.is_none() {
field_cursor = temp_cursor.clone();
if err.issues.len() > last_errc {
err.issues.remove(last_errc);
}
continue;
}
let type_info: Option<(&'static str, JsonTypeInfo)> =
match &temp_cursor.to_string()[..] {
"failover-context.kind" => Some(("failoverContext.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"failover-context.settings-version" => Some(("failoverContext.settingsVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
_ => {
let suggestion = FieldCursor::did_you_mean(key, &vec!["failover-context", "kind", "settings-version"]);
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string()))));
None
}
};
if let Some((field_cursor_str, type_info)) = type_info {
FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor);
}
}
let mut request: api::InstancesFailoverRequest = json::value::from_value(object).unwrap();
let mut call = self.hub.instances().failover(request, opt.value_of("project").unwrap_or(""), opt.value_of("instance").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit(),
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
fn _instances_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut call = self.hub.instances().get(opt.value_of("project").unwrap_or(""), opt.value_of("instance").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit(),
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
fn _instances_import(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut field_cursor = FieldCursor::default();
let mut object = json::value::Value::Object(Default::default());
for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let last_errc = err.issues.len();
let (key, value) = parse_kv_arg(&*kvarg, err, false);
let mut temp_cursor = field_cursor.clone();
if let Err(field_err) = temp_cursor.set(&*key) {
err.issues.push(field_err);
}
if value.is_none() {
field_cursor = temp_cursor.clone();
if err.issues.len() > last_errc {
err.issues.remove(last_errc);
}
continue;
}
let type_info: Option<(&'static str, JsonTypeInfo)> =
match &temp_cursor.to_string()[..] {
"import-context.bak-import-options.encryption-options.pvk-path" => Some(("importContext.bakImportOptions.encryptionOptions.pvkPath", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"import-context.bak-import-options.encryption-options.cert-path" => Some(("importContext.bakImportOptions.encryptionOptions.certPath", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"import-context.bak-import-options.encryption-options.pvk-password" => Some(("importContext.bakImportOptions.encryptionOptions.pvkPassword", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"import-context.kind" => Some(("importContext.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"import-context.database" => Some(("importContext.database", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"import-context.file-type" => Some(("importContext.fileType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"import-context.uri" => Some(("importContext.uri", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"import-context.import-user" => Some(("importContext.importUser", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"import-context.csv-import-options.table" => Some(("importContext.csvImportOptions.table", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"import-context.csv-import-options.columns" => Some(("importContext.csvImportOptions.columns", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })),
_ => {
let suggestion = FieldCursor::did_you_mean(key, &vec!["bak-import-options", "cert-path", "columns", "csv-import-options", "database", "encryption-options", "file-type", "import-context", "import-user", "kind", "pvk-password", "pvk-path", "table", "uri"]);
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string()))));
None
}
};
if let Some((field_cursor_str, type_info)) = type_info {
FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor);
}
}
let mut request: api::InstancesImportRequest = json::value::from_value(object).unwrap();
let mut call = self.hub.instances().import(request, opt.value_of("project").unwrap_or(""), opt.value_of("instance").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit(),
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
fn _instances_insert(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut field_cursor = FieldCursor::default();
let mut object = json::value::Value::Object(Default::default());
for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let last_errc = err.issues.len();
let (key, value) = parse_kv_arg(&*kvarg, err, false);
let mut temp_cursor = field_cursor.clone();
if let Err(field_err) = temp_cursor.set(&*key) {
err.issues.push(field_err);
}
if value.is_none() {
field_cursor = temp_cursor.clone();
if err.issues.len() > last_errc {
err.issues.remove(last_errc);
}
continue;
}
let type_info: Option<(&'static str, JsonTypeInfo)> =
match &temp_cursor.to_string()[..] {
"backend-type" => Some(("backendType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"current-disk-size" => Some(("currentDiskSize", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"service-account-email-address" => Some(("serviceAccountEmailAddress", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"database-version" => Some(("databaseVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"instance-type" => Some(("instanceType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"max-disk-size" => Some(("maxDiskSize", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"disk-encryption-configuration.kind" => Some(("diskEncryptionConfiguration.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"disk-encryption-configuration.kms-key-name" => Some(("diskEncryptionConfiguration.kmsKeyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"scheduled-maintenance.start-time" => Some(("scheduledMaintenance.startTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"scheduled-maintenance.can-defer" => Some(("scheduledMaintenance.canDefer", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })),
"scheduled-maintenance.can-reschedule" => Some(("scheduledMaintenance.canReschedule", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })),
"suspension-reason" => Some(("suspensionReason", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })),
"master-instance-name" => Some(("masterInstanceName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"disk-encryption-status.kms-key-version-name" => Some(("diskEncryptionStatus.kmsKeyVersionName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"disk-encryption-status.kind" => Some(("diskEncryptionStatus.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"gce-zone" => Some(("gceZone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"failover-replica.available" => Some(("failoverReplica.available", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })),
"failover-replica.name" => Some(("failoverReplica.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"replica-names" => Some(("replicaNames", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })),
"on-premises-configuration.username" => Some(("onPremisesConfiguration.username", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"on-premises-configuration.kind" => Some(("onPremisesConfiguration.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"on-premises-configuration.password" => Some(("onPremisesConfiguration.password", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"on-premises-configuration.ca-certificate" => Some(("onPremisesConfiguration.caCertificate", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"on-premises-configuration.client-certificate" => Some(("onPremisesConfiguration.clientCertificate", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"on-premises-configuration.dump-file-path" => Some(("onPremisesConfiguration.dumpFilePath", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"on-premises-configuration.host-port" => Some(("onPremisesConfiguration.hostPort", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"on-premises-configuration.client-key" => Some(("onPremisesConfiguration.clientKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"connection-name" => Some(("connectionName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"ipv6-address" => Some(("ipv6Address", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"server-ca-cert.cert-serial-number" => Some(("serverCaCert.certSerialNumber", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"server-ca-cert.kind" => Some(("serverCaCert.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"server-ca-cert.sha1-fingerprint" => Some(("serverCaCert.sha1Fingerprint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"server-ca-cert.common-name" => Some(("serverCaCert.commonName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"server-ca-cert.instance" => Some(("serverCaCert.instance", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"server-ca-cert.cert" => Some(("serverCaCert.cert", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"server-ca-cert.expiration-time" => Some(("serverCaCert.expirationTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"server-ca-cert.create-time" => Some(("serverCaCert.createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"server-ca-cert.self-link" => Some(("serverCaCert.selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"region" => Some(("region", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"settings.kind" => Some(("settings.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"settings.data-disk-type" => Some(("settings.dataDiskType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"settings.availability-type" => Some(("settings.availabilityType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"settings.maintenance-window.kind" => Some(("settings.maintenanceWindow.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"settings.maintenance-window.update-track" => Some(("settings.maintenanceWindow.updateTrack", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"settings.maintenance-window.day" => Some(("settings.maintenanceWindow.day", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })),
"settings.maintenance-window.hour" => Some(("settings.maintenanceWindow.hour", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })),
"settings.authorized-gae-applications" => Some(("settings.authorizedGaeApplications", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })),
"settings.activation-policy" => Some(("settings.activationPolicy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"settings.backup-configuration.kind" => Some(("settings.backupConfiguration.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"settings.backup-configuration.enabled" => Some(("settings.backupConfiguration.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })),
"settings.backup-configuration.replication-log-archiving-enabled" => Some(("settings.backupConfiguration.replicationLogArchivingEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })),
"settings.backup-configuration.binary-log-enabled" => Some(("settings.backupConfiguration.binaryLogEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })),
"settings.backup-configuration.location" => Some(("settings.backupConfiguration.location", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"settings.backup-configuration.start-time" => Some(("settings.backupConfiguration.startTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"settings.backup-configuration.point-in-time-recovery-enabled" => Some(("settings.backupConfiguration.pointInTimeRecoveryEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })),
"settings.ip-configuration.ipv4-enabled" => Some(("settings.ipConfiguration.ipv4Enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })),
"settings.ip-configuration.require-ssl" => Some(("settings.ipConfiguration.requireSsl", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })),
"settings.ip-configuration.private-network" => Some(("settings.ipConfiguration.privateNetwork", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"settings.crash-safe-replication-enabled" => Some(("settings.crashSafeReplicationEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })),
"settings.user-labels" => Some(("settings.userLabels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })),
"settings.database-replication-enabled" => Some(("settings.databaseReplicationEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })),
"settings.replication-type" => Some(("settings.replicationType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"settings.storage-auto-resize-limit" => Some(("settings.storageAutoResizeLimit", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"settings.tier" => Some(("settings.tier", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"settings.pricing-plan" => Some(("settings.pricingPlan", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"settings.settings-version" => Some(("settings.settingsVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"settings.storage-auto-resize" => Some(("settings.storageAutoResize", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })),
"settings.location-preference.kind" => Some(("settings.locationPreference.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"settings.location-preference.zone" => Some(("settings.locationPreference.zone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"settings.location-preference.follow-gae-application" => Some(("settings.locationPreference.followGaeApplication", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"settings.data-disk-size-gb" => Some(("settings.dataDiskSizeGb", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"project" => Some(("project", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"replica-configuration.kind" => Some(("replicaConfiguration.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"replica-configuration.failover-target" => Some(("replicaConfiguration.failoverTarget", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })),
"replica-configuration.mysql-replica-configuration.username" => Some(("replicaConfiguration.mysqlReplicaConfiguration.username", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"replica-configuration.mysql-replica-configuration.kind" => Some(("replicaConfiguration.mysqlReplicaConfiguration.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"replica-configuration.mysql-replica-configuration.connect-retry-interval" => Some(("replicaConfiguration.mysqlReplicaConfiguration.connectRetryInterval", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })),
"replica-configuration.mysql-replica-configuration.ssl-cipher" => Some(("replicaConfiguration.mysqlReplicaConfiguration.sslCipher", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"replica-configuration.mysql-replica-configuration.ca-certificate" => Some(("replicaConfiguration.mysqlReplicaConfiguration.caCertificate", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"replica-configuration.mysql-replica-configuration.client-certificate" => Some(("replicaConfiguration.mysqlReplicaConfiguration.clientCertificate", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"replica-configuration.mysql-replica-configuration.master-heartbeat-period" => Some(("replicaConfiguration.mysqlReplicaConfiguration.masterHeartbeatPeriod", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"replica-configuration.mysql-replica-configuration.verify-server-certificate" => Some(("replicaConfiguration.mysqlReplicaConfiguration.verifyServerCertificate", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })),
"replica-configuration.mysql-replica-configuration.dump-file-path" => Some(("replicaConfiguration.mysqlReplicaConfiguration.dumpFilePath", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"replica-configuration.mysql-replica-configuration.password" => Some(("replicaConfiguration.mysqlReplicaConfiguration.password", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"replica-configuration.mysql-replica-configuration.client-key" => Some(("replicaConfiguration.mysqlReplicaConfiguration.clientKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"root-password" => Some(("rootPassword", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
_ => {
let suggestion = FieldCursor::did_you_mean(key, &vec!["activation-policy", "authorized-gae-applications", "availability-type", "available", "backend-type", "backup-configuration", "binary-log-enabled", "ca-certificate", "can-defer", "can-reschedule", "cert", "cert-serial-number", "client-certificate", "client-key", "common-name", "connect-retry-interval", "connection-name", "crash-safe-replication-enabled", "create-time", "current-disk-size", "data-disk-size-gb", "data-disk-type", "database-replication-enabled", "database-version", "day", "disk-encryption-configuration", "disk-encryption-status", "dump-file-path", "enabled", "etag", "expiration-time", "failover-replica", "failover-target", "follow-gae-application", "gce-zone", "host-port", "hour", "instance", "instance-type", "ip-configuration", "ipv4-enabled", "ipv6-address", "kind", "kms-key-name", "kms-key-version-name", "location", "location-preference", "maintenance-window", "master-heartbeat-period", "master-instance-name", "max-disk-size", "mysql-replica-configuration", "name", "on-premises-configuration", "password", "point-in-time-recovery-enabled", "pricing-plan", "private-network", "project", "region", "replica-configuration", "replica-names", "replication-log-archiving-enabled", "replication-type", "require-ssl", "root-password", "scheduled-maintenance", "self-link", "server-ca-cert", "service-account-email-address", "settings", "settings-version", "sha1-fingerprint", "ssl-cipher", "start-time", "state", "storage-auto-resize", "storage-auto-resize-limit", "suspension-reason", "tier", "update-track", "user-labels", "username", "verify-server-certificate", "zone"]);
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string()))));
None
}
};
if let Some((field_cursor_str, type_info)) = type_info {
FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor);
}
}
let mut request: api::DatabaseInstance = json::value::from_value(object).unwrap();
let mut call = self.hub.instances().insert(request, opt.value_of("project").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit(),
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
fn _instances_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut call = self.hub.instances().list(opt.value_of("project").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
"page-token" => {
call = call.page_token(value.unwrap_or(""));
},
"max-results" => {
call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer"));
},
"filter" => {
call = call.filter(value.unwrap_or(""));
},
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v.extend(["filter", "page-token", "max-results"].iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit(),
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
fn _instances_list_server_cas(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut call = self.hub.instances().list_server_cas(opt.value_of("project").unwrap_or(""), opt.value_of("instance").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit(),
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
fn _instances_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut field_cursor = FieldCursor::default();
let mut object = json::value::Value::Object(Default::default());
for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let last_errc = err.issues.len();
let (key, value) = parse_kv_arg(&*kvarg, err, false);
let mut temp_cursor = field_cursor.clone();
if let Err(field_err) = temp_cursor.set(&*key) {
err.issues.push(field_err);
}
if value.is_none() {
field_cursor = temp_cursor.clone();
if err.issues.len() > last_errc {
err.issues.remove(last_errc);
}
continue;
}
let type_info: Option<(&'static str, JsonTypeInfo)> =
match &temp_cursor.to_string()[..] {
"backend-type" => Some(("backendType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"current-disk-size" => Some(("currentDiskSize", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"service-account-email-address" => Some(("serviceAccountEmailAddress", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"database-version" => Some(("databaseVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"instance-type" => Some(("instanceType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"max-disk-size" => Some(("maxDiskSize", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"disk-encryption-configuration.kind" => Some(("diskEncryptionConfiguration.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"disk-encryption-configuration.kms-key-name" => Some(("diskEncryptionConfiguration.kmsKeyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"scheduled-maintenance.start-time" => Some(("scheduledMaintenance.startTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"scheduled-maintenance.can-defer" => Some(("scheduledMaintenance.canDefer", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })),
"scheduled-maintenance.can-reschedule" => Some(("scheduledMaintenance.canReschedule", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })),
"suspension-reason" => Some(("suspensionReason", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })),
"master-instance-name" => Some(("masterInstanceName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"disk-encryption-status.kms-key-version-name" => Some(("diskEncryptionStatus.kmsKeyVersionName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"disk-encryption-status.kind" => Some(("diskEncryptionStatus.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"gce-zone" => Some(("gceZone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"failover-replica.available" => Some(("failoverReplica.available", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })),
"failover-replica.name" => Some(("failoverReplica.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"replica-names" => Some(("replicaNames", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })),
"on-premises-configuration.username" => Some(("onPremisesConfiguration.username", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"on-premises-configuration.kind" => Some(("onPremisesConfiguration.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"on-premises-configuration.password" => Some(("onPremisesConfiguration.password", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"on-premises-configuration.ca-certificate" => Some(("onPremisesConfiguration.caCertificate", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"on-premises-configuration.client-certificate" => Some(("onPremisesConfiguration.clientCertificate", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"on-premises-configuration.dump-file-path" => Some(("onPremisesConfiguration.dumpFilePath", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"on-premises-configuration.host-port" => Some(("onPremisesConfiguration.hostPort", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"on-premises-configuration.client-key" => Some(("onPremisesConfiguration.clientKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"connection-name" => Some(("connectionName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"ipv6-address" => Some(("ipv6Address", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"server-ca-cert.cert-serial-number" => Some(("serverCaCert.certSerialNumber", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"server-ca-cert.kind" => Some(("serverCaCert.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"server-ca-cert.sha1-fingerprint" => Some(("serverCaCert.sha1Fingerprint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"server-ca-cert.common-name" => Some(("serverCaCert.commonName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"server-ca-cert.instance" => Some(("serverCaCert.instance", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"server-ca-cert.cert" => Some(("serverCaCert.cert", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"server-ca-cert.expiration-time" => Some(("serverCaCert.expirationTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"server-ca-cert.create-time" => Some(("serverCaCert.createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"server-ca-cert.self-link" => Some(("serverCaCert.selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"region" => Some(("region", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"settings.kind" => Some(("settings.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"settings.data-disk-type" => Some(("settings.dataDiskType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"settings.availability-type" => Some(("settings.availabilityType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"settings.maintenance-window.kind" => Some(("settings.maintenanceWindow.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"settings.maintenance-window.update-track" => Some(("settings.maintenanceWindow.updateTrack", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"settings.maintenance-window.day" => Some(("settings.maintenanceWindow.day", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })),
"settings.maintenance-window.hour" => Some(("settings.maintenanceWindow.hour", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })),
"settings.authorized-gae-applications" => Some(("settings.authorizedGaeApplications", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })),
"settings.activation-policy" => Some(("settings.activationPolicy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"settings.backup-configuration.kind" => Some(("settings.backupConfiguration.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"settings.backup-configuration.enabled" => Some(("settings.backupConfiguration.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })),
"settings.backup-configuration.replication-log-archiving-enabled" => Some(("settings.backupConfiguration.replicationLogArchivingEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })),
"settings.backup-configuration.binary-log-enabled" => Some(("settings.backupConfiguration.binaryLogEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })),
"settings.backup-configuration.location" => Some(("settings.backupConfiguration.location", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"settings.backup-configuration.start-time" => Some(("settings.backupConfiguration.startTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"settings.backup-configuration.point-in-time-recovery-enabled" => Some(("settings.backupConfiguration.pointInTimeRecoveryEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })),
"settings.ip-configuration.ipv4-enabled" => Some(("settings.ipConfiguration.ipv4Enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })),
"settings.ip-configuration.require-ssl" => Some(("settings.ipConfiguration.requireSsl", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })),
"settings.ip-configuration.private-network" => Some(("settings.ipConfiguration.privateNetwork", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"settings.crash-safe-replication-enabled" => Some(("settings.crashSafeReplicationEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })),
"settings.user-labels" => Some(("settings.userLabels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })),
"settings.database-replication-enabled" => Some(("settings.databaseReplicationEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })),
"settings.replication-type" => Some(("settings.replicationType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"settings.storage-auto-resize-limit" => Some(("settings.storageAutoResizeLimit", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"settings.tier" => Some(("settings.tier", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"settings.pricing-plan" => Some(("settings.pricingPlan", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"settings.settings-version" => Some(("settings.settingsVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"settings.storage-auto-resize" => Some(("settings.storageAutoResize", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })),
"settings.location-preference.kind" => Some(("settings.locationPreference.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"settings.location-preference.zone" => Some(("settings.locationPreference.zone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"settings.location-preference.follow-gae-application" => Some(("settings.locationPreference.followGaeApplication", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"settings.data-disk-size-gb" => Some(("settings.dataDiskSizeGb", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"project" => Some(("project", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"replica-configuration.kind" => Some(("replicaConfiguration.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"replica-configuration.failover-target" => Some(("replicaConfiguration.failoverTarget", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })),
"replica-configuration.mysql-replica-configuration.username" => Some(("replicaConfiguration.mysqlReplicaConfiguration.username", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"replica-configuration.mysql-replica-configuration.kind" => Some(("replicaConfiguration.mysqlReplicaConfiguration.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"replica-configuration.mysql-replica-configuration.connect-retry-interval" => Some(("replicaConfiguration.mysqlReplicaConfiguration.connectRetryInterval", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })),
"replica-configuration.mysql-replica-configuration.ssl-cipher" => Some(("replicaConfiguration.mysqlReplicaConfiguration.sslCipher", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"replica-configuration.mysql-replica-configuration.ca-certificate" => Some(("replicaConfiguration.mysqlReplicaConfiguration.caCertificate", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"replica-configuration.mysql-replica-configuration.client-certificate" => Some(("replicaConfiguration.mysqlReplicaConfiguration.clientCertificate", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"replica-configuration.mysql-replica-configuration.master-heartbeat-period" => Some(("replicaConfiguration.mysqlReplicaConfiguration.masterHeartbeatPeriod", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"replica-configuration.mysql-replica-configuration.verify-server-certificate" => Some(("replicaConfiguration.mysqlReplicaConfiguration.verifyServerCertificate", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })),
"replica-configuration.mysql-replica-configuration.dump-file-path" => Some(("replicaConfiguration.mysqlReplicaConfiguration.dumpFilePath", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"replica-configuration.mysql-replica-configuration.password" => Some(("replicaConfiguration.mysqlReplicaConfiguration.password", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"replica-configuration.mysql-replica-configuration.client-key" => Some(("replicaConfiguration.mysqlReplicaConfiguration.clientKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"root-password" => Some(("rootPassword", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
_ => {
let suggestion = FieldCursor::did_you_mean(key, &vec!["activation-policy", "authorized-gae-applications", "availability-type", "available", "backend-type", "backup-configuration", "binary-log-enabled", "ca-certificate", "can-defer", "can-reschedule", "cert", "cert-serial-number", "client-certificate", "client-key", "common-name", "connect-retry-interval", "connection-name", "crash-safe-replication-enabled", "create-time", "current-disk-size", "data-disk-size-gb", "data-disk-type", "database-replication-enabled", "database-version", "day", "disk-encryption-configuration", "disk-encryption-status", "dump-file-path", "enabled", "etag", "expiration-time", "failover-replica", "failover-target", "follow-gae-application", "gce-zone", "host-port", "hour", "instance", "instance-type", "ip-configuration", "ipv4-enabled", "ipv6-address", "kind", "kms-key-name", "kms-key-version-name", "location", "location-preference", "maintenance-window", "master-heartbeat-period", "master-instance-name", "max-disk-size", "mysql-replica-configuration", "name", "on-premises-configuration", "password", "point-in-time-recovery-enabled", "pricing-plan", "private-network", "project", "region", "replica-configuration", "replica-names", "replication-log-archiving-enabled", "replication-type", "require-ssl", "root-password", "scheduled-maintenance", "self-link", "server-ca-cert", "service-account-email-address", "settings", "settings-version", "sha1-fingerprint", "ssl-cipher", "start-time", "state", "storage-auto-resize", "storage-auto-resize-limit", "suspension-reason", "tier", "update-track", "user-labels", "username", "verify-server-certificate", "zone"]);
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string()))));
None
}
};
if let Some((field_cursor_str, type_info)) = type_info {
FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor);
}
}
let mut request: api::DatabaseInstance = json::value::from_value(object).unwrap();
let mut call = self.hub.instances().patch(request, opt.value_of("project").unwrap_or(""), opt.value_of("instance").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit(),
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
fn _instances_promote_replica(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut call = self.hub.instances().promote_replica(opt.value_of("project").unwrap_or(""), opt.value_of("instance").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit(),
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
fn _instances_reset_ssl_config(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut call = self.hub.instances().reset_ssl_config(opt.value_of("project").unwrap_or(""), opt.value_of("instance").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit(),
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
fn _instances_restart(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut call = self.hub.instances().restart(opt.value_of("project").unwrap_or(""), opt.value_of("instance").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit(),
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
fn _instances_restore_backup(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut field_cursor = FieldCursor::default();
let mut object = json::value::Value::Object(Default::default());
for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let last_errc = err.issues.len();
let (key, value) = parse_kv_arg(&*kvarg, err, false);
let mut temp_cursor = field_cursor.clone();
if let Err(field_err) = temp_cursor.set(&*key) {
err.issues.push(field_err);
}
if value.is_none() {
field_cursor = temp_cursor.clone();
if err.issues.len() > last_errc {
err.issues.remove(last_errc);
}
continue;
}
let type_info: Option<(&'static str, JsonTypeInfo)> =
match &temp_cursor.to_string()[..] {
"restore-backup-context.instance-id" => Some(("restoreBackupContext.instanceId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"restore-backup-context.project" => Some(("restoreBackupContext.project", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"restore-backup-context.kind" => Some(("restoreBackupContext.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"restore-backup-context.backup-run-id" => Some(("restoreBackupContext.backupRunId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
_ => {
let suggestion = FieldCursor::did_you_mean(key, &vec!["backup-run-id", "instance-id", "kind", "project", "restore-backup-context"]);
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string()))));
None
}
};
if let Some((field_cursor_str, type_info)) = type_info {
FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor);
}
}
let mut request: api::InstancesRestoreBackupRequest = json::value::from_value(object).unwrap();
let mut call = self.hub.instances().restore_backup(request, opt.value_of("project").unwrap_or(""), opt.value_of("instance").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit(),
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
fn _instances_rotate_server_ca(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut field_cursor = FieldCursor::default();
let mut object = json::value::Value::Object(Default::default());
for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let last_errc = err.issues.len();
let (key, value) = parse_kv_arg(&*kvarg, err, false);
let mut temp_cursor = field_cursor.clone();
if let Err(field_err) = temp_cursor.set(&*key) {
err.issues.push(field_err);
}
if value.is_none() {
field_cursor = temp_cursor.clone();
if err.issues.len() > last_errc {
err.issues.remove(last_errc);
}
continue;
}
let type_info: Option<(&'static str, JsonTypeInfo)> =
match &temp_cursor.to_string()[..] {
"rotate-server-ca-context.kind" => Some(("rotateServerCaContext.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"rotate-server-ca-context.next-version" => Some(("rotateServerCaContext.nextVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
_ => {
let suggestion = FieldCursor::did_you_mean(key, &vec!["kind", "next-version", "rotate-server-ca-context"]);
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string()))));
None
}
};
if let Some((field_cursor_str, type_info)) = type_info {
FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor);
}
}
let mut request: api::InstancesRotateServerCaRequest = json::value::from_value(object).unwrap();
let mut call = self.hub.instances().rotate_server_ca(request, opt.value_of("project").unwrap_or(""), opt.value_of("instance").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit(),
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
fn _instances_start_replica(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut call = self.hub.instances().start_replica(opt.value_of("project").unwrap_or(""), opt.value_of("instance").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit(),
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
fn _instances_stop_replica(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut call = self.hub.instances().stop_replica(opt.value_of("project").unwrap_or(""), opt.value_of("instance").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit(),
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
fn _instances_truncate_log(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut field_cursor = FieldCursor::default();
let mut object = json::value::Value::Object(Default::default());
for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let last_errc = err.issues.len();
let (key, value) = parse_kv_arg(&*kvarg, err, false);
let mut temp_cursor = field_cursor.clone();
if let Err(field_err) = temp_cursor.set(&*key) {
err.issues.push(field_err);
}
if value.is_none() {
field_cursor = temp_cursor.clone();
if err.issues.len() > last_errc {
err.issues.remove(last_errc);
}
continue;
}
let type_info: Option<(&'static str, JsonTypeInfo)> =
match &temp_cursor.to_string()[..] {
"truncate-log-context.log-type" => Some(("truncateLogContext.logType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"truncate-log-context.kind" => Some(("truncateLogContext.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
_ => {
let suggestion = FieldCursor::did_you_mean(key, &vec!["kind", "log-type", "truncate-log-context"]);
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string()))));
None
}
};
if let Some((field_cursor_str, type_info)) = type_info {
FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor);
}
}
let mut request: api::InstancesTruncateLogRequest = json::value::from_value(object).unwrap();
let mut call = self.hub.instances().truncate_log(request, opt.value_of("project").unwrap_or(""), opt.value_of("instance").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit(),
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
fn _instances_update(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut field_cursor = FieldCursor::default();
let mut object = json::value::Value::Object(Default::default());
for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let last_errc = err.issues.len();
let (key, value) = parse_kv_arg(&*kvarg, err, false);
let mut temp_cursor = field_cursor.clone();
if let Err(field_err) = temp_cursor.set(&*key) {
err.issues.push(field_err);
}
if value.is_none() {
field_cursor = temp_cursor.clone();
if err.issues.len() > last_errc {
err.issues.remove(last_errc);
}
continue;
}
let type_info: Option<(&'static str, JsonTypeInfo)> =
match &temp_cursor.to_string()[..] {
"backend-type" => Some(("backendType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"current-disk-size" => Some(("currentDiskSize", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"service-account-email-address" => Some(("serviceAccountEmailAddress", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"database-version" => Some(("databaseVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"instance-type" => Some(("instanceType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"max-disk-size" => Some(("maxDiskSize", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"disk-encryption-configuration.kind" => Some(("diskEncryptionConfiguration.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"disk-encryption-configuration.kms-key-name" => Some(("diskEncryptionConfiguration.kmsKeyName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"scheduled-maintenance.start-time" => Some(("scheduledMaintenance.startTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"scheduled-maintenance.can-defer" => Some(("scheduledMaintenance.canDefer", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })),
"scheduled-maintenance.can-reschedule" => Some(("scheduledMaintenance.canReschedule", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })),
"suspension-reason" => Some(("suspensionReason", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })),
"master-instance-name" => Some(("masterInstanceName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"disk-encryption-status.kms-key-version-name" => Some(("diskEncryptionStatus.kmsKeyVersionName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"disk-encryption-status.kind" => Some(("diskEncryptionStatus.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"state" => Some(("state", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"gce-zone" => Some(("gceZone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"failover-replica.available" => Some(("failoverReplica.available", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })),
"failover-replica.name" => Some(("failoverReplica.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"replica-names" => Some(("replicaNames", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })),
"on-premises-configuration.username" => Some(("onPremisesConfiguration.username", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"on-premises-configuration.kind" => Some(("onPremisesConfiguration.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"on-premises-configuration.password" => Some(("onPremisesConfiguration.password", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"on-premises-configuration.ca-certificate" => Some(("onPremisesConfiguration.caCertificate", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"on-premises-configuration.client-certificate" => Some(("onPremisesConfiguration.clientCertificate", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"on-premises-configuration.dump-file-path" => Some(("onPremisesConfiguration.dumpFilePath", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"on-premises-configuration.host-port" => Some(("onPremisesConfiguration.hostPort", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"on-premises-configuration.client-key" => Some(("onPremisesConfiguration.clientKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"connection-name" => Some(("connectionName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"ipv6-address" => Some(("ipv6Address", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"server-ca-cert.cert-serial-number" => Some(("serverCaCert.certSerialNumber", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"server-ca-cert.kind" => Some(("serverCaCert.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"server-ca-cert.sha1-fingerprint" => Some(("serverCaCert.sha1Fingerprint", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"server-ca-cert.common-name" => Some(("serverCaCert.commonName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"server-ca-cert.instance" => Some(("serverCaCert.instance", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"server-ca-cert.cert" => Some(("serverCaCert.cert", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"server-ca-cert.expiration-time" => Some(("serverCaCert.expirationTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"server-ca-cert.create-time" => Some(("serverCaCert.createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"server-ca-cert.self-link" => Some(("serverCaCert.selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"region" => Some(("region", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"settings.kind" => Some(("settings.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"settings.data-disk-type" => Some(("settings.dataDiskType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"settings.availability-type" => Some(("settings.availabilityType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"settings.maintenance-window.kind" => Some(("settings.maintenanceWindow.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"settings.maintenance-window.update-track" => Some(("settings.maintenanceWindow.updateTrack", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"settings.maintenance-window.day" => Some(("settings.maintenanceWindow.day", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })),
"settings.maintenance-window.hour" => Some(("settings.maintenanceWindow.hour", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })),
"settings.authorized-gae-applications" => Some(("settings.authorizedGaeApplications", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })),
"settings.activation-policy" => Some(("settings.activationPolicy", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"settings.backup-configuration.kind" => Some(("settings.backupConfiguration.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"settings.backup-configuration.enabled" => Some(("settings.backupConfiguration.enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })),
"settings.backup-configuration.replication-log-archiving-enabled" => Some(("settings.backupConfiguration.replicationLogArchivingEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })),
"settings.backup-configuration.binary-log-enabled" => Some(("settings.backupConfiguration.binaryLogEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })),
"settings.backup-configuration.location" => Some(("settings.backupConfiguration.location", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"settings.backup-configuration.start-time" => Some(("settings.backupConfiguration.startTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"settings.backup-configuration.point-in-time-recovery-enabled" => Some(("settings.backupConfiguration.pointInTimeRecoveryEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })),
"settings.ip-configuration.ipv4-enabled" => Some(("settings.ipConfiguration.ipv4Enabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })),
"settings.ip-configuration.require-ssl" => Some(("settings.ipConfiguration.requireSsl", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })),
"settings.ip-configuration.private-network" => Some(("settings.ipConfiguration.privateNetwork", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"settings.crash-safe-replication-enabled" => Some(("settings.crashSafeReplicationEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })),
"settings.user-labels" => Some(("settings.userLabels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Map })),
"settings.database-replication-enabled" => Some(("settings.databaseReplicationEnabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })),
"settings.replication-type" => Some(("settings.replicationType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"settings.storage-auto-resize-limit" => Some(("settings.storageAutoResizeLimit", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"settings.tier" => Some(("settings.tier", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"settings.pricing-plan" => Some(("settings.pricingPlan", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"settings.settings-version" => Some(("settings.settingsVersion", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"settings.storage-auto-resize" => Some(("settings.storageAutoResize", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })),
"settings.location-preference.kind" => Some(("settings.locationPreference.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"settings.location-preference.zone" => Some(("settings.locationPreference.zone", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"settings.location-preference.follow-gae-application" => Some(("settings.locationPreference.followGaeApplication", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"settings.data-disk-size-gb" => Some(("settings.dataDiskSizeGb", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"project" => Some(("project", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"replica-configuration.kind" => Some(("replicaConfiguration.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"replica-configuration.failover-target" => Some(("replicaConfiguration.failoverTarget", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })),
"replica-configuration.mysql-replica-configuration.username" => Some(("replicaConfiguration.mysqlReplicaConfiguration.username", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"replica-configuration.mysql-replica-configuration.kind" => Some(("replicaConfiguration.mysqlReplicaConfiguration.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"replica-configuration.mysql-replica-configuration.connect-retry-interval" => Some(("replicaConfiguration.mysqlReplicaConfiguration.connectRetryInterval", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })),
"replica-configuration.mysql-replica-configuration.ssl-cipher" => Some(("replicaConfiguration.mysqlReplicaConfiguration.sslCipher", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"replica-configuration.mysql-replica-configuration.ca-certificate" => Some(("replicaConfiguration.mysqlReplicaConfiguration.caCertificate", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"replica-configuration.mysql-replica-configuration.client-certificate" => Some(("replicaConfiguration.mysqlReplicaConfiguration.clientCertificate", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"replica-configuration.mysql-replica-configuration.master-heartbeat-period" => Some(("replicaConfiguration.mysqlReplicaConfiguration.masterHeartbeatPeriod", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"replica-configuration.mysql-replica-configuration.verify-server-certificate" => Some(("replicaConfiguration.mysqlReplicaConfiguration.verifyServerCertificate", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })),
"replica-configuration.mysql-replica-configuration.dump-file-path" => Some(("replicaConfiguration.mysqlReplicaConfiguration.dumpFilePath", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"replica-configuration.mysql-replica-configuration.password" => Some(("replicaConfiguration.mysqlReplicaConfiguration.password", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"replica-configuration.mysql-replica-configuration.client-key" => Some(("replicaConfiguration.mysqlReplicaConfiguration.clientKey", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"root-password" => Some(("rootPassword", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"self-link" => Some(("selfLink", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
_ => {
let suggestion = FieldCursor::did_you_mean(key, &vec!["activation-policy", "authorized-gae-applications", "availability-type", "available", "backend-type", "backup-configuration", "binary-log-enabled", "ca-certificate", "can-defer", "can-reschedule", "cert", "cert-serial-number", "client-certificate", "client-key", "common-name", "connect-retry-interval", "connection-name", "crash-safe-replication-enabled", "create-time", "current-disk-size", "data-disk-size-gb", "data-disk-type", "database-replication-enabled", "database-version", "day", "disk-encryption-configuration", "disk-encryption-status", "dump-file-path", "enabled", "etag", "expiration-time", "failover-replica", "failover-target", "follow-gae-application", "gce-zone", "host-port", "hour", "instance", "instance-type", "ip-configuration", "ipv4-enabled", "ipv6-address", "kind", "kms-key-name", "kms-key-version-name", "location", "location-preference", "maintenance-window", "master-heartbeat-period", "master-instance-name", "max-disk-size", "mysql-replica-configuration", "name", "on-premises-configuration", "password", "point-in-time-recovery-enabled", "pricing-plan", "private-network", "project", "region", "replica-configuration", "replica-names", "replication-log-archiving-enabled", "replication-type", "require-ssl", "root-password", "scheduled-maintenance", "self-link", "server-ca-cert", "service-account-email-address", "settings", "settings-version", "sha1-fingerprint", "ssl-cipher", "start-time", "state", "storage-auto-resize", "storage-auto-resize-limit", "suspension-reason", "tier", "update-track", "user-labels", "username", "verify-server-certificate", "zone"]);
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string()))));
None
}
};
if let Some((field_cursor_str, type_info)) = type_info {
FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor);
}
}
let mut request: api::DatabaseInstance = json::value::from_value(object).unwrap();
let mut call = self.hub.instances().update(request, opt.value_of("project").unwrap_or(""), opt.value_of("instance").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit(),
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
fn _operations_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut call = self.hub.operations().get(opt.value_of("project").unwrap_or(""), opt.value_of("operation").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit(),
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
fn _operations_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut call = self.hub.operations().list(opt.value_of("project").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
"page-token" => {
call = call.page_token(value.unwrap_or(""));
},
"max-results" => {
call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer"));
},
"instance" => {
call = call.instance(value.unwrap_or(""));
},
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v.extend(["page-token", "max-results", "instance"].iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit(),
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
fn _projects_instances_reschedule_maintenance(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut field_cursor = FieldCursor::default();
let mut object = json::value::Value::Object(Default::default());
for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let last_errc = err.issues.len();
let (key, value) = parse_kv_arg(&*kvarg, err, false);
let mut temp_cursor = field_cursor.clone();
if let Err(field_err) = temp_cursor.set(&*key) {
err.issues.push(field_err);
}
if value.is_none() {
field_cursor = temp_cursor.clone();
if err.issues.len() > last_errc {
err.issues.remove(last_errc);
}
continue;
}
let type_info: Option<(&'static str, JsonTypeInfo)> =
match &temp_cursor.to_string()[..] {
"reschedule.schedule-time" => Some(("reschedule.scheduleTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"reschedule.reschedule-type" => Some(("reschedule.rescheduleType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
_ => {
let suggestion = FieldCursor::did_you_mean(key, &vec!["reschedule", "reschedule-type", "schedule-time"]);
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string()))));
None
}
};
if let Some((field_cursor_str, type_info)) = type_info {
FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor);
}
}
let mut request: api::SqlInstancesRescheduleMaintenanceRequestBody = json::value::from_value(object).unwrap();
let mut call = self.hub.projects().instances_reschedule_maintenance(request, opt.value_of("project").unwrap_or(""), opt.value_of("instance").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit(),
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
fn _projects_instances_start_external_sync(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut call = self.hub.projects().instances_start_external_sync(opt.value_of("project").unwrap_or(""), opt.value_of("instance").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
"sync-mode" => {
call = call.sync_mode(value.unwrap_or(""));
},
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v.extend(["sync-mode"].iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit(),
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
fn _projects_instances_verify_external_sync_settings(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut call = self.hub.projects().instances_verify_external_sync_settings(opt.value_of("project").unwrap_or(""), opt.value_of("instance").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
"verify-connection-only" => {
call = call.verify_connection_only(arg_from_str(value.unwrap_or("false"), err, "verify-connection-only", "boolean"));
},
"sync-mode" => {
call = call.sync_mode(value.unwrap_or(""));
},
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v.extend(["sync-mode", "verify-connection-only"].iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit(),
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
fn _ssl_certs_create_ephemeral(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut field_cursor = FieldCursor::default();
let mut object = json::value::Value::Object(Default::default());
for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let last_errc = err.issues.len();
let (key, value) = parse_kv_arg(&*kvarg, err, false);
let mut temp_cursor = field_cursor.clone();
if let Err(field_err) = temp_cursor.set(&*key) {
err.issues.push(field_err);
}
if value.is_none() {
field_cursor = temp_cursor.clone();
if err.issues.len() > last_errc {
err.issues.remove(last_errc);
}
continue;
}
let type_info: Option<(&'static str, JsonTypeInfo)> =
match &temp_cursor.to_string()[..] {
"public-key" => Some(("public_key", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
_ => {
let suggestion = FieldCursor::did_you_mean(key, &vec!["public-key"]);
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string()))));
None
}
};
if let Some((field_cursor_str, type_info)) = type_info {
FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor);
}
}
let mut request: api::SslCertsCreateEphemeralRequest = json::value::from_value(object).unwrap();
let mut call = self.hub.ssl_certs().create_ephemeral(request, opt.value_of("project").unwrap_or(""), opt.value_of("instance").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit(),
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
fn _ssl_certs_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut call = self.hub.ssl_certs().delete(opt.value_of("project").unwrap_or(""), opt.value_of("instance").unwrap_or(""), opt.value_of("sha1-fingerprint").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit(),
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
fn _ssl_certs_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut call = self.hub.ssl_certs().get(opt.value_of("project").unwrap_or(""), opt.value_of("instance").unwrap_or(""), opt.value_of("sha1-fingerprint").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit(),
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
fn _ssl_certs_insert(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut field_cursor = FieldCursor::default();
let mut object = json::value::Value::Object(Default::default());
for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let last_errc = err.issues.len();
let (key, value) = parse_kv_arg(&*kvarg, err, false);
let mut temp_cursor = field_cursor.clone();
if let Err(field_err) = temp_cursor.set(&*key) {
err.issues.push(field_err);
}
if value.is_none() {
field_cursor = temp_cursor.clone();
if err.issues.len() > last_errc {
err.issues.remove(last_errc);
}
continue;
}
let type_info: Option<(&'static str, JsonTypeInfo)> =
match &temp_cursor.to_string()[..] {
"common-name" => Some(("commonName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
_ => {
let suggestion = FieldCursor::did_you_mean(key, &vec!["common-name"]);
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string()))));
None
}
};
if let Some((field_cursor_str, type_info)) = type_info {
FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor);
}
}
let mut request: api::SslCertsInsertRequest = json::value::from_value(object).unwrap();
let mut call = self.hub.ssl_certs().insert(request, opt.value_of("project").unwrap_or(""), opt.value_of("instance").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit(),
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
fn _ssl_certs_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut call = self.hub.ssl_certs().list(opt.value_of("project").unwrap_or(""), opt.value_of("instance").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit(),
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
fn _tiers_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut call = self.hub.tiers().list(opt.value_of("project").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit(),
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
fn _users_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut call = self.hub.users().delete(opt.value_of("project").unwrap_or(""), opt.value_of("instance").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
"name" => {
call = call.name(value.unwrap_or(""));
},
"host" => {
call = call.host(value.unwrap_or(""));
},
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v.extend(["host", "name"].iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit(),
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
fn _users_insert(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut field_cursor = FieldCursor::default();
let mut object = json::value::Value::Object(Default::default());
for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let last_errc = err.issues.len();
let (key, value) = parse_kv_arg(&*kvarg, err, false);
let mut temp_cursor = field_cursor.clone();
if let Err(field_err) = temp_cursor.set(&*key) {
err.issues.push(field_err);
}
if value.is_none() {
field_cursor = temp_cursor.clone();
if err.issues.len() > last_errc {
err.issues.remove(last_errc);
}
continue;
}
let type_info: Option<(&'static str, JsonTypeInfo)> =
match &temp_cursor.to_string()[..] {
"kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"project" => Some(("project", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"instance" => Some(("instance", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"host" => Some(("host", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"sqlserver-user-details.disabled" => Some(("sqlserverUserDetails.disabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })),
"sqlserver-user-details.server-roles" => Some(("sqlserverUserDetails.serverRoles", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })),
"password" => Some(("password", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
_ => {
let suggestion = FieldCursor::did_you_mean(key, &vec!["disabled", "etag", "host", "instance", "kind", "name", "password", "project", "server-roles", "sqlserver-user-details"]);
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string()))));
None
}
};
if let Some((field_cursor_str, type_info)) = type_info {
FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor);
}
}
let mut request: api::User = json::value::from_value(object).unwrap();
let mut call = self.hub.users().insert(request, opt.value_of("project").unwrap_or(""), opt.value_of("instance").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit(),
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
fn _users_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut call = self.hub.users().list(opt.value_of("project").unwrap_or(""), opt.value_of("instance").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit(),
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
fn _users_update(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError)
-> Result<(), DoitError> {
let mut field_cursor = FieldCursor::default();
let mut object = json::value::Value::Object(Default::default());
for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let last_errc = err.issues.len();
let (key, value) = parse_kv_arg(&*kvarg, err, false);
let mut temp_cursor = field_cursor.clone();
if let Err(field_err) = temp_cursor.set(&*key) {
err.issues.push(field_err);
}
if value.is_none() {
field_cursor = temp_cursor.clone();
if err.issues.len() > last_errc {
err.issues.remove(last_errc);
}
continue;
}
let type_info: Option<(&'static str, JsonTypeInfo)> =
match &temp_cursor.to_string()[..] {
"kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"project" => Some(("project", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"instance" => Some(("instance", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"host" => Some(("host", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"etag" => Some(("etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
"sqlserver-user-details.disabled" => Some(("sqlserverUserDetails.disabled", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })),
"sqlserver-user-details.server-roles" => Some(("sqlserverUserDetails.serverRoles", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })),
"password" => Some(("password", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })),
_ => {
let suggestion = FieldCursor::did_you_mean(key, &vec!["disabled", "etag", "host", "instance", "kind", "name", "password", "project", "server-roles", "sqlserver-user-details"]);
err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string()))));
None
}
};
if let Some((field_cursor_str, type_info)) = type_info {
FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor);
}
}
let mut request: api::User = json::value::from_value(object).unwrap();
let mut call = self.hub.users().update(request, opt.value_of("project").unwrap_or(""), opt.value_of("instance").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
"name" => {
call = call.name(value.unwrap_or(""));
},
"host" => {
call = call.host(value.unwrap_or(""));
},
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v.extend(["host", "name"].iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit(),
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
fn _doit(&self, dry_run: bool) -> Result<Result<(), DoitError>, Option<InvalidOptionsError>> {
let mut err = InvalidOptionsError::new();
let mut call_result: Result<(), DoitError> = Ok(());
let mut err_opt: Option<InvalidOptionsError> = None;
match self.opt.subcommand() {
("backup-runs", Some(opt)) => {
match opt.subcommand() {
("delete", Some(opt)) => {
call_result = self._backup_runs_delete(opt, dry_run, &mut err);
},
("get", Some(opt)) => {
call_result = self._backup_runs_get(opt, dry_run, &mut err);
},
("insert", Some(opt)) => {
call_result = self._backup_runs_insert(opt, dry_run, &mut err);
},
("list", Some(opt)) => {
call_result = self._backup_runs_list(opt, dry_run, &mut err);
},
_ => {
err.issues.push(CLIError::MissingMethodError("backup-runs".to_string()));
writeln!(io::stderr(), "{}\n", opt.usage()).ok();
}
}
},
("databases", Some(opt)) => {
match opt.subcommand() {
("delete", Some(opt)) => {
call_result = self._databases_delete(opt, dry_run, &mut err);
},
("get", Some(opt)) => {
call_result = self._databases_get(opt, dry_run, &mut err);
},
("insert", Some(opt)) => {
call_result = self._databases_insert(opt, dry_run, &mut err);
},
("list", Some(opt)) => {
call_result = self._databases_list(opt, dry_run, &mut err);
},
("patch", Some(opt)) => {
call_result = self._databases_patch(opt, dry_run, &mut err);
},
("update", Some(opt)) => {
call_result = self._databases_update(opt, dry_run, &mut err);
},
_ => {
err.issues.push(CLIError::MissingMethodError("databases".to_string()));
writeln!(io::stderr(), "{}\n", opt.usage()).ok();
}
}
},
("flags", Some(opt)) => {
match opt.subcommand() {
("list", Some(opt)) => {
call_result = self._flags_list(opt, dry_run, &mut err);
},
_ => {
err.issues.push(CLIError::MissingMethodError("flags".to_string()));
writeln!(io::stderr(), "{}\n", opt.usage()).ok();
}
}
},
("instances", Some(opt)) => {
match opt.subcommand() {
("add-server-ca", Some(opt)) => {
call_result = self._instances_add_server_ca(opt, dry_run, &mut err);
},
("clone", Some(opt)) => {
call_result = self._instances_clone(opt, dry_run, &mut err);
},
("delete", Some(opt)) => {
call_result = self._instances_delete(opt, dry_run, &mut err);
},
("demote-master", Some(opt)) => {
call_result = self._instances_demote_master(opt, dry_run, &mut err);
},
("export", Some(opt)) => {
call_result = self._instances_export(opt, dry_run, &mut err);
},
("failover", Some(opt)) => {
call_result = self._instances_failover(opt, dry_run, &mut err);
},
("get", Some(opt)) => {
call_result = self._instances_get(opt, dry_run, &mut err);
},
("import", Some(opt)) => {
call_result = self._instances_import(opt, dry_run, &mut err);
},
("insert", Some(opt)) => {
call_result = self._instances_insert(opt, dry_run, &mut err);
},
("list", Some(opt)) => {
call_result = self._instances_list(opt, dry_run, &mut err);
},
("list-server-cas", Some(opt)) => {
call_result = self._instances_list_server_cas(opt, dry_run, &mut err);
},
("patch", Some(opt)) => {
call_result = self._instances_patch(opt, dry_run, &mut err);
},
("promote-replica", Some(opt)) => {
call_result = self._instances_promote_replica(opt, dry_run, &mut err);
},
("reset-ssl-config", Some(opt)) => {
call_result = self._instances_reset_ssl_config(opt, dry_run, &mut err);
},
("restart", Some(opt)) => {
call_result = self._instances_restart(opt, dry_run, &mut err);
},
("restore-backup", Some(opt)) => {
call_result = self._instances_restore_backup(opt, dry_run, &mut err);
},
("rotate-server-ca", Some(opt)) => {
call_result = self._instances_rotate_server_ca(opt, dry_run, &mut err);
},
("start-replica", Some(opt)) => {
call_result = self._instances_start_replica(opt, dry_run, &mut err);
},
("stop-replica", Some(opt)) => {
call_result = self._instances_stop_replica(opt, dry_run, &mut err);
},
("truncate-log", Some(opt)) => {
call_result = self._instances_truncate_log(opt, dry_run, &mut err);
},
("update", Some(opt)) => {
call_result = self._instances_update(opt, dry_run, &mut err);
},
_ => {
err.issues.push(CLIError::MissingMethodError("instances".to_string()));
writeln!(io::stderr(), "{}\n", opt.usage()).ok();
}
}
},
("operations", Some(opt)) => {
match opt.subcommand() {
("get", Some(opt)) => {
call_result = self._operations_get(opt, dry_run, &mut err);
},
("list", Some(opt)) => {
call_result = self._operations_list(opt, dry_run, &mut err);
},
_ => {
err.issues.push(CLIError::MissingMethodError("operations".to_string()));
writeln!(io::stderr(), "{}\n", opt.usage()).ok();
}
}
},
("projects", Some(opt)) => {
match opt.subcommand() {
("instances-reschedule-maintenance", Some(opt)) => {
call_result = self._projects_instances_reschedule_maintenance(opt, dry_run, &mut err);
},
("instances-start-external-sync", Some(opt)) => {
call_result = self._projects_instances_start_external_sync(opt, dry_run, &mut err);
},
("instances-verify-external-sync-settings", Some(opt)) => {
call_result = self._projects_instances_verify_external_sync_settings(opt, dry_run, &mut err);
},
_ => {
err.issues.push(CLIError::MissingMethodError("projects".to_string()));
writeln!(io::stderr(), "{}\n", opt.usage()).ok();
}
}
},
("ssl-certs", Some(opt)) => {
match opt.subcommand() {
("create-ephemeral", Some(opt)) => {
call_result = self._ssl_certs_create_ephemeral(opt, dry_run, &mut err);
},
("delete", Some(opt)) => {
call_result = self._ssl_certs_delete(opt, dry_run, &mut err);
},
("get", Some(opt)) => {
call_result = self._ssl_certs_get(opt, dry_run, &mut err);
},
("insert", Some(opt)) => {
call_result = self._ssl_certs_insert(opt, dry_run, &mut err);
},
("list", Some(opt)) => {
call_result = self._ssl_certs_list(opt, dry_run, &mut err);
},
_ => {
err.issues.push(CLIError::MissingMethodError("ssl-certs".to_string()));
writeln!(io::stderr(), "{}\n", opt.usage()).ok();
}
}
},
("tiers", Some(opt)) => {
match opt.subcommand() {
("list", Some(opt)) => {
call_result = self._tiers_list(opt, dry_run, &mut err);
},
_ => {
err.issues.push(CLIError::MissingMethodError("tiers".to_string()));
writeln!(io::stderr(), "{}\n", opt.usage()).ok();
}
}
},
("users", Some(opt)) => {
match opt.subcommand() {
("delete", Some(opt)) => {
call_result = self._users_delete(opt, dry_run, &mut err);
},
("insert", Some(opt)) => {
call_result = self._users_insert(opt, dry_run, &mut err);
},
("list", Some(opt)) => {
call_result = self._users_list(opt, dry_run, &mut err);
},
("update", Some(opt)) => {
call_result = self._users_update(opt, dry_run, &mut err);
},
_ => {
err.issues.push(CLIError::MissingMethodError("users".to_string()));
writeln!(io::stderr(), "{}\n", opt.usage()).ok();
}
}
},
_ => {
err.issues.push(CLIError::MissingCommandError);
writeln!(io::stderr(), "{}\n", self.opt.usage()).ok();
}
}
if dry_run {
if err.issues.len() > 0 {
err_opt = Some(err);
}
Err(err_opt)
} else {
Ok(call_result)
}
}
// Please note that this call will fail if any part of the opt can't be handled
fn new(opt: ArgMatches<'n>) -> Result<Engine<'n>, InvalidOptionsError> {
let (config_dir, secret) = {
let config_dir = match cmn::assure_config_dir_exists(opt.value_of("folder").unwrap_or("~/.google-service-cli")) {
Err(e) => return Err(InvalidOptionsError::single(e, 3)),
Ok(p) => p,
};
match cmn::application_secret_from_directory(&config_dir, "sql1-beta4-secret.json",
"{\"installed\":{\"auth_uri\":\"https://accounts.google.com/o/oauth2/auth\",\"client_secret\":\"hCsslbCUyfehWMmbkG8vTYxG\",\"token_uri\":\"https://accounts.google.com/o/oauth2/token\",\"client_email\":\"\",\"redirect_uris\":[\"urn:ietf:wg:oauth:2.0:oob\",\"oob\"],\"client_x509_cert_url\":\"\",\"client_id\":\"620010449518-9ngf7o4dhs0dka470npqvor6dc5lqb9b.apps.googleusercontent.com\",\"auth_provider_x509_cert_url\":\"https://www.googleapis.com/oauth2/v1/certs\"}}") {
Ok(secret) => (config_dir, secret),
Err(e) => return Err(InvalidOptionsError::single(e, 4))
}
};
let auth = Authenticator::new( &secret, DefaultAuthenticatorDelegate,
if opt.is_present("debug-auth") {
hyper::Client::with_connector(mock::TeeConnector {
connector: hyper::net::HttpsConnector::new(hyper_rustls::TlsClient::new())
})
} else {
hyper::Client::with_connector(hyper::net::HttpsConnector::new(hyper_rustls::TlsClient::new()))
},
JsonTokenStorage {
program_name: "sql1-beta4",
db_dir: config_dir.clone(),
}, Some(FlowType::InstalledRedirect(54324)));
let client =
if opt.is_present("debug") {
hyper::Client::with_connector(mock::TeeConnector {
connector: hyper::net::HttpsConnector::new(hyper_rustls::TlsClient::new())
})
} else {
hyper::Client::with_connector(hyper::net::HttpsConnector::new(hyper_rustls::TlsClient::new()))
};
let engine = Engine {
opt: opt,
hub: api::SQLAdmin::new(client, auth),
gp: vec!["$-xgafv", "access-token", "alt", "callback", "fields", "key", "oauth-token", "pretty-print", "quota-user", "upload-type", "upload-protocol"],
gpm: vec![
("$-xgafv", "$.xgafv"),
("access-token", "access_token"),
("oauth-token", "oauth_token"),
("pretty-print", "prettyPrint"),
("quota-user", "quotaUser"),
("upload-type", "uploadType"),
("upload-protocol", "upload_protocol"),
]
};
match engine._doit(true) {
Err(Some(err)) => Err(err),
Err(None) => Ok(engine),
Ok(_) => unreachable!(),
}
}
fn doit(&self) -> Result<(), DoitError> {
match self._doit(false) {
Ok(res) => res,
Err(_) => unreachable!(),
}
}
}
fn main() {
let mut exit_status = 0i32;
let arg_data = [
("backup-runs", "methods: 'delete', 'get', 'insert' and 'list'", vec![
("delete",
Some(r##"Deletes the backup taken by a backup run."##),
"Details at http://byron.github.io/google-apis-rs/google_sql1_beta4_cli/backup-runs_delete",
vec![
(Some(r##"project"##),
None,
Some(r##"Project ID of the project that contains the instance."##),
Some(true),
Some(false)),
(Some(r##"instance"##),
None,
Some(r##"Cloud SQL instance ID. This does not include the project ID."##),
Some(true),
Some(false)),
(Some(r##"id"##),
None,
Some(r##"The ID of the Backup Run to delete. To find a Backup Run ID, use the <a
href="/sql/docs/db_path/admin-api/rest/v1beta4/backupRuns/list">list</a>
method."##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("get",
Some(r##"Retrieves a resource containing information about a backup run."##),
"Details at http://byron.github.io/google-apis-rs/google_sql1_beta4_cli/backup-runs_get",
vec![
(Some(r##"project"##),
None,
Some(r##"Project ID of the project that contains the instance."##),
Some(true),
Some(false)),
(Some(r##"instance"##),
None,
Some(r##"Cloud SQL instance ID. This does not include the project ID."##),
Some(true),
Some(false)),
(Some(r##"id"##),
None,
Some(r##"The ID of this Backup Run."##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("insert",
Some(r##"Creates a new backup run on demand. This method is applicable only to
Second Generation instances."##),
"Details at http://byron.github.io/google-apis-rs/google_sql1_beta4_cli/backup-runs_insert",
vec![
(Some(r##"project"##),
None,
Some(r##"Project ID of the project that contains the instance."##),
Some(true),
Some(false)),
(Some(r##"instance"##),
None,
Some(r##"Cloud SQL instance ID. This does not include the project ID."##),
Some(true),
Some(false)),
(Some(r##"kv"##),
Some(r##"r"##),
Some(r##"Set various fields of the request structure, matching the key=value form"##),
Some(true),
Some(true)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("list",
Some(r##"Lists all backup runs associated with a given instance and configuration in
the reverse chronological order of the backup initiation time."##),
"Details at http://byron.github.io/google-apis-rs/google_sql1_beta4_cli/backup-runs_list",
vec![
(Some(r##"project"##),
None,
Some(r##"Project ID of the project that contains the instance."##),
Some(true),
Some(false)),
(Some(r##"instance"##),
None,
Some(r##"Cloud SQL instance ID. This does not include the project ID."##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
]),
("databases", "methods: 'delete', 'get', 'insert', 'list', 'patch' and 'update'", vec![
("delete",
Some(r##"Deletes a database from a Cloud SQL instance."##),
"Details at http://byron.github.io/google-apis-rs/google_sql1_beta4_cli/databases_delete",
vec![
(Some(r##"project"##),
None,
Some(r##"Project ID of the project that contains the instance."##),
Some(true),
Some(false)),
(Some(r##"instance"##),
None,
Some(r##"Database instance ID. This does not include the project ID."##),
Some(true),
Some(false)),
(Some(r##"database"##),
None,
Some(r##"Name of the database to be deleted in the instance."##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("get",
Some(r##"Retrieves a resource containing information about a database inside a Cloud
SQL instance."##),
"Details at http://byron.github.io/google-apis-rs/google_sql1_beta4_cli/databases_get",
vec![
(Some(r##"project"##),
None,
Some(r##"Project ID of the project that contains the instance."##),
Some(true),
Some(false)),
(Some(r##"instance"##),
None,
Some(r##"Database instance ID. This does not include the project ID."##),
Some(true),
Some(false)),
(Some(r##"database"##),
None,
Some(r##"Name of the database in the instance."##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("insert",
Some(r##"Inserts a resource containing information about a database inside a Cloud
SQL instance."##),
"Details at http://byron.github.io/google-apis-rs/google_sql1_beta4_cli/databases_insert",
vec![
(Some(r##"project"##),
None,
Some(r##"Project ID of the project that contains the instance."##),
Some(true),
Some(false)),
(Some(r##"instance"##),
None,
Some(r##"Database instance ID. This does not include the project ID."##),
Some(true),
Some(false)),
(Some(r##"kv"##),
Some(r##"r"##),
Some(r##"Set various fields of the request structure, matching the key=value form"##),
Some(true),
Some(true)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("list",
Some(r##"Lists databases in the specified Cloud SQL instance."##),
"Details at http://byron.github.io/google-apis-rs/google_sql1_beta4_cli/databases_list",
vec![
(Some(r##"project"##),
None,
Some(r##"Project ID of the project that contains the instance."##),
Some(true),
Some(false)),
(Some(r##"instance"##),
None,
Some(r##"Cloud SQL instance ID. This does not include the project ID."##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("patch",
Some(r##"Partially updates a resource containing information about a database inside
a Cloud SQL instance. This method supports patch semantics."##),
"Details at http://byron.github.io/google-apis-rs/google_sql1_beta4_cli/databases_patch",
vec![
(Some(r##"project"##),
None,
Some(r##"Project ID of the project that contains the instance."##),
Some(true),
Some(false)),
(Some(r##"instance"##),
None,
Some(r##"Database instance ID. This does not include the project ID."##),
Some(true),
Some(false)),
(Some(r##"database"##),
None,
Some(r##"Name of the database to be updated in the instance."##),
Some(true),
Some(false)),
(Some(r##"kv"##),
Some(r##"r"##),
Some(r##"Set various fields of the request structure, matching the key=value form"##),
Some(true),
Some(true)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("update",
Some(r##"Updates a resource containing information about a database inside a Cloud
SQL instance."##),
"Details at http://byron.github.io/google-apis-rs/google_sql1_beta4_cli/databases_update",
vec![
(Some(r##"project"##),
None,
Some(r##"Project ID of the project that contains the instance."##),
Some(true),
Some(false)),
(Some(r##"instance"##),
None,
Some(r##"Database instance ID. This does not include the project ID."##),
Some(true),
Some(false)),
(Some(r##"database"##),
None,
Some(r##"Name of the database to be updated in the instance."##),
Some(true),
Some(false)),
(Some(r##"kv"##),
Some(r##"r"##),
Some(r##"Set various fields of the request structure, matching the key=value form"##),
Some(true),
Some(true)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
]),
("flags", "methods: 'list'", vec![
("list",
Some(r##"List all available database flags for Cloud SQL instances."##),
"Details at http://byron.github.io/google-apis-rs/google_sql1_beta4_cli/flags_list",
vec![
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
]),
("instances", "methods: 'add-server-ca', 'clone', 'delete', 'demote-master', 'export', 'failover', 'get', 'import', 'insert', 'list', 'list-server-cas', 'patch', 'promote-replica', 'reset-ssl-config', 'restart', 'restore-backup', 'rotate-server-ca', 'start-replica', 'stop-replica', 'truncate-log' and 'update'", vec![
("add-server-ca",
Some(r##"Add a new trusted Certificate Authority (CA) version for the specified
instance. Required to prepare for a certificate rotation. If a CA version
was previously added but never used in a certificate rotation, this
operation replaces that version. There cannot be more than one CA version
waiting to be rotated in."##),
"Details at http://byron.github.io/google-apis-rs/google_sql1_beta4_cli/instances_add-server-ca",
vec![
(Some(r##"project"##),
None,
Some(r##"Project ID of the project that contains the instance."##),
Some(true),
Some(false)),
(Some(r##"instance"##),
None,
Some(r##"Cloud SQL instance ID. This does not include the project ID."##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("clone",
Some(r##"Creates a Cloud SQL instance as a clone of the source instance. Using this
operation might cause your instance to restart."##),
"Details at http://byron.github.io/google-apis-rs/google_sql1_beta4_cli/instances_clone",
vec![
(Some(r##"project"##),
None,
Some(r##"Project ID of the source as well as the clone Cloud SQL instance."##),
Some(true),
Some(false)),
(Some(r##"instance"##),
None,
Some(r##"The ID of the Cloud SQL instance to be cloned (source). This does not
include the project ID."##),
Some(true),
Some(false)),
(Some(r##"kv"##),
Some(r##"r"##),
Some(r##"Set various fields of the request structure, matching the key=value form"##),
Some(true),
Some(true)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("delete",
Some(r##"Deletes a Cloud SQL instance."##),
"Details at http://byron.github.io/google-apis-rs/google_sql1_beta4_cli/instances_delete",
vec![
(Some(r##"project"##),
None,
Some(r##"Project ID of the project that contains the instance to be deleted."##),
Some(true),
Some(false)),
(Some(r##"instance"##),
None,
Some(r##"Cloud SQL instance ID. This does not include the project ID."##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("demote-master",
Some(r##"Demotes the stand-alone instance to be a Cloud SQL read replica for an
external database server."##),
"Details at http://byron.github.io/google-apis-rs/google_sql1_beta4_cli/instances_demote-master",
vec![
(Some(r##"project"##),
None,
Some(r##"ID of the project that contains the instance."##),
Some(true),
Some(false)),
(Some(r##"instance"##),
None,
Some(r##"Cloud SQL instance name."##),
Some(true),
Some(false)),
(Some(r##"kv"##),
Some(r##"r"##),
Some(r##"Set various fields of the request structure, matching the key=value form"##),
Some(true),
Some(true)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("export",
Some(r##"Exports data from a Cloud SQL instance to a Cloud Storage bucket as a SQL
dump or CSV file."##),
"Details at http://byron.github.io/google-apis-rs/google_sql1_beta4_cli/instances_export",
vec![
(Some(r##"project"##),
None,
Some(r##"Project ID of the project that contains the instance to be exported."##),
Some(true),
Some(false)),
(Some(r##"instance"##),
None,
Some(r##"Cloud SQL instance ID. This does not include the project ID."##),
Some(true),
Some(false)),
(Some(r##"kv"##),
Some(r##"r"##),
Some(r##"Set various fields of the request structure, matching the key=value form"##),
Some(true),
Some(true)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("failover",
Some(r##"Failover the instance to its failover replica instance. Using this
operation might cause your instance to restart."##),
"Details at http://byron.github.io/google-apis-rs/google_sql1_beta4_cli/instances_failover",
vec![
(Some(r##"project"##),
None,
Some(r##"ID of the project that contains the read replica."##),
Some(true),
Some(false)),
(Some(r##"instance"##),
None,
Some(r##"Cloud SQL instance ID. This does not include the project ID."##),
Some(true),
Some(false)),
(Some(r##"kv"##),
Some(r##"r"##),
Some(r##"Set various fields of the request structure, matching the key=value form"##),
Some(true),
Some(true)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("get",
Some(r##"Retrieves a resource containing information about a Cloud SQL instance."##),
"Details at http://byron.github.io/google-apis-rs/google_sql1_beta4_cli/instances_get",
vec![
(Some(r##"project"##),
None,
Some(r##"Project ID of the project that contains the instance."##),
Some(true),
Some(false)),
(Some(r##"instance"##),
None,
Some(r##"Database instance ID. This does not include the project ID."##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("import",
Some(r##"Imports data into a Cloud SQL instance from a SQL dump or CSV file in
Cloud Storage."##),
"Details at http://byron.github.io/google-apis-rs/google_sql1_beta4_cli/instances_import",
vec![
(Some(r##"project"##),
None,
Some(r##"Project ID of the project that contains the instance."##),
Some(true),
Some(false)),
(Some(r##"instance"##),
None,
Some(r##"Cloud SQL instance ID. This does not include the project ID."##),
Some(true),
Some(false)),
(Some(r##"kv"##),
Some(r##"r"##),
Some(r##"Set various fields of the request structure, matching the key=value form"##),
Some(true),
Some(true)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("insert",
Some(r##"Creates a new Cloud SQL instance."##),
"Details at http://byron.github.io/google-apis-rs/google_sql1_beta4_cli/instances_insert",
vec![
(Some(r##"project"##),
None,
Some(r##"Project ID of the project to which the newly created Cloud SQL instances
should belong."##),
Some(true),
Some(false)),
(Some(r##"kv"##),
Some(r##"r"##),
Some(r##"Set various fields of the request structure, matching the key=value form"##),
Some(true),
Some(true)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("list",
Some(r##"Lists instances under a given project."##),
"Details at http://byron.github.io/google-apis-rs/google_sql1_beta4_cli/instances_list",
vec![
(Some(r##"project"##),
None,
Some(r##"Project ID of the project for which to list Cloud SQL instances."##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("list-server-cas",
Some(r##"Lists all of the trusted Certificate Authorities (CAs) for the specified
instance. There can be up to three CAs listed: the CA that was used to sign
the certificate that is currently in use, a CA that has been added but not
yet used to sign a certificate, and a CA used to sign a certificate that
has previously rotated out."##),
"Details at http://byron.github.io/google-apis-rs/google_sql1_beta4_cli/instances_list-server-cas",
vec![
(Some(r##"project"##),
None,
Some(r##"Project ID of the project that contains the instance."##),
Some(true),
Some(false)),
(Some(r##"instance"##),
None,
Some(r##"Cloud SQL instance ID. This does not include the project ID."##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("patch",
Some(r##"Updates settings of a Cloud SQL instance.
This method supports patch semantics."##),
"Details at http://byron.github.io/google-apis-rs/google_sql1_beta4_cli/instances_patch",
vec![
(Some(r##"project"##),
None,
Some(r##"Project ID of the project that contains the instance."##),
Some(true),
Some(false)),
(Some(r##"instance"##),
None,
Some(r##"Cloud SQL instance ID. This does not include the project ID."##),
Some(true),
Some(false)),
(Some(r##"kv"##),
Some(r##"r"##),
Some(r##"Set various fields of the request structure, matching the key=value form"##),
Some(true),
Some(true)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("promote-replica",
Some(r##"Promotes the read replica instance to be a stand-alone Cloud SQL instance.
Using this operation might cause your instance to restart."##),
"Details at http://byron.github.io/google-apis-rs/google_sql1_beta4_cli/instances_promote-replica",
vec![
(Some(r##"project"##),
None,
Some(r##"ID of the project that contains the read replica."##),
Some(true),
Some(false)),
(Some(r##"instance"##),
None,
Some(r##"Cloud SQL read replica instance name."##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("reset-ssl-config",
Some(r##"Deletes all client certificates and generates a new server SSL certificate
for the instance."##),
"Details at http://byron.github.io/google-apis-rs/google_sql1_beta4_cli/instances_reset-ssl-config",
vec![
(Some(r##"project"##),
None,
Some(r##"Project ID of the project that contains the instance."##),
Some(true),
Some(false)),
(Some(r##"instance"##),
None,
Some(r##"Cloud SQL instance ID. This does not include the project ID."##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("restart",
Some(r##"Restarts a Cloud SQL instance."##),
"Details at http://byron.github.io/google-apis-rs/google_sql1_beta4_cli/instances_restart",
vec![
(Some(r##"project"##),
None,
Some(r##"Project ID of the project that contains the instance to be restarted."##),
Some(true),
Some(false)),
(Some(r##"instance"##),
None,
Some(r##"Cloud SQL instance ID. This does not include the project ID."##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("restore-backup",
Some(r##"Restores a backup of a Cloud SQL instance. Using this operation might cause
your instance to restart."##),
"Details at http://byron.github.io/google-apis-rs/google_sql1_beta4_cli/instances_restore-backup",
vec![
(Some(r##"project"##),
None,
Some(r##"Project ID of the project that contains the instance."##),
Some(true),
Some(false)),
(Some(r##"instance"##),
None,
Some(r##"Cloud SQL instance ID. This does not include the project ID."##),
Some(true),
Some(false)),
(Some(r##"kv"##),
Some(r##"r"##),
Some(r##"Set various fields of the request structure, matching the key=value form"##),
Some(true),
Some(true)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("rotate-server-ca",
Some(r##"Rotates the server certificate to one signed by the Certificate Authority
(CA) version previously added with the addServerCA method."##),
"Details at http://byron.github.io/google-apis-rs/google_sql1_beta4_cli/instances_rotate-server-ca",
vec![
(Some(r##"project"##),
None,
Some(r##"Project ID of the project that contains the instance."##),
Some(true),
Some(false)),
(Some(r##"instance"##),
None,
Some(r##"Cloud SQL instance ID. This does not include the project ID."##),
Some(true),
Some(false)),
(Some(r##"kv"##),
Some(r##"r"##),
Some(r##"Set various fields of the request structure, matching the key=value form"##),
Some(true),
Some(true)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("start-replica",
Some(r##"Starts the replication in the read replica instance."##),
"Details at http://byron.github.io/google-apis-rs/google_sql1_beta4_cli/instances_start-replica",
vec![
(Some(r##"project"##),
None,
Some(r##"ID of the project that contains the read replica."##),
Some(true),
Some(false)),
(Some(r##"instance"##),
None,
Some(r##"Cloud SQL read replica instance name."##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("stop-replica",
Some(r##"Stops the replication in the read replica instance."##),
"Details at http://byron.github.io/google-apis-rs/google_sql1_beta4_cli/instances_stop-replica",
vec![
(Some(r##"project"##),
None,
Some(r##"ID of the project that contains the read replica."##),
Some(true),
Some(false)),
(Some(r##"instance"##),
None,
Some(r##"Cloud SQL read replica instance name."##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("truncate-log",
Some(r##"Truncate MySQL general and slow query log tables"##),
"Details at http://byron.github.io/google-apis-rs/google_sql1_beta4_cli/instances_truncate-log",
vec![
(Some(r##"project"##),
None,
Some(r##"Project ID of the Cloud SQL project."##),
Some(true),
Some(false)),
(Some(r##"instance"##),
None,
Some(r##"Cloud SQL instance ID. This does not include the project ID."##),
Some(true),
Some(false)),
(Some(r##"kv"##),
Some(r##"r"##),
Some(r##"Set various fields of the request structure, matching the key=value form"##),
Some(true),
Some(true)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("update",
Some(r##"Updates settings of a Cloud SQL instance. Using this operation might cause
your instance to restart."##),
"Details at http://byron.github.io/google-apis-rs/google_sql1_beta4_cli/instances_update",
vec![
(Some(r##"project"##),
None,
Some(r##"Project ID of the project that contains the instance."##),
Some(true),
Some(false)),
(Some(r##"instance"##),
None,
Some(r##"Cloud SQL instance ID. This does not include the project ID."##),
Some(true),
Some(false)),
(Some(r##"kv"##),
Some(r##"r"##),
Some(r##"Set various fields of the request structure, matching the key=value form"##),
Some(true),
Some(true)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
]),
("operations", "methods: 'get' and 'list'", vec![
("get",
Some(r##"Retrieves an instance operation that has been performed on an instance."##),
"Details at http://byron.github.io/google-apis-rs/google_sql1_beta4_cli/operations_get",
vec![
(Some(r##"project"##),
None,
Some(r##"Project ID of the project that contains the instance."##),
Some(true),
Some(false)),
(Some(r##"operation"##),
None,
Some(r##"Instance operation ID."##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("list",
Some(r##"Lists all instance operations that have been performed on the given Cloud
SQL instance in the reverse chronological order of the start time."##),
"Details at http://byron.github.io/google-apis-rs/google_sql1_beta4_cli/operations_list",
vec![
(Some(r##"project"##),
None,
Some(r##"Project ID of the project that contains the instance."##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
]),
("projects", "methods: 'instances-reschedule-maintenance', 'instances-start-external-sync' and 'instances-verify-external-sync-settings'", vec![
("instances-reschedule-maintenance",
Some(r##"Reschedules the maintenance on the given instance."##),
"Details at http://byron.github.io/google-apis-rs/google_sql1_beta4_cli/projects_instances-reschedule-maintenance",
vec![
(Some(r##"project"##),
None,
Some(r##"ID of the project that contains the instance."##),
Some(true),
Some(false)),
(Some(r##"instance"##),
None,
Some(r##"Cloud SQL instance ID. This does not include the project ID."##),
Some(true),
Some(false)),
(Some(r##"kv"##),
Some(r##"r"##),
Some(r##"Set various fields of the request structure, matching the key=value form"##),
Some(true),
Some(true)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("instances-start-external-sync",
Some(r##"Start External master migration."##),
"Details at http://byron.github.io/google-apis-rs/google_sql1_beta4_cli/projects_instances-start-external-sync",
vec![
(Some(r##"project"##),
None,
Some(r##"ID of the project that contains the first generation instance."##),
Some(true),
Some(false)),
(Some(r##"instance"##),
None,
Some(r##"Cloud SQL instance ID. This does not include the project ID."##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("instances-verify-external-sync-settings",
Some(r##"Verify External master external sync settings."##),
"Details at http://byron.github.io/google-apis-rs/google_sql1_beta4_cli/projects_instances-verify-external-sync-settings",
vec![
(Some(r##"project"##),
None,
Some(r##"Project ID of the project that contains the instance."##),
Some(true),
Some(false)),
(Some(r##"instance"##),
None,
Some(r##"Cloud SQL instance ID. This does not include the project ID."##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
]),
("ssl-certs", "methods: 'create-ephemeral', 'delete', 'get', 'insert' and 'list'", vec![
("create-ephemeral",
Some(r##"Generates a short-lived X509 certificate containing the provided public key
and signed by a private key specific to the target instance. Users may use
the certificate to authenticate as themselves when connecting to the
database."##),
"Details at http://byron.github.io/google-apis-rs/google_sql1_beta4_cli/ssl-certs_create-ephemeral",
vec![
(Some(r##"project"##),
None,
Some(r##"Project ID of the Cloud SQL project."##),
Some(true),
Some(false)),
(Some(r##"instance"##),
None,
Some(r##"Cloud SQL instance ID. This does not include the project ID."##),
Some(true),
Some(false)),
(Some(r##"kv"##),
Some(r##"r"##),
Some(r##"Set various fields of the request structure, matching the key=value form"##),
Some(true),
Some(true)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("delete",
Some(r##"Deletes the SSL certificate. For First Generation instances, the
certificate remains valid until the instance is restarted."##),
"Details at http://byron.github.io/google-apis-rs/google_sql1_beta4_cli/ssl-certs_delete",
vec![
(Some(r##"project"##),
None,
Some(r##"Project ID of the project that contains the instance."##),
Some(true),
Some(false)),
(Some(r##"instance"##),
None,
Some(r##"Cloud SQL instance ID. This does not include the project ID."##),
Some(true),
Some(false)),
(Some(r##"sha1-fingerprint"##),
None,
Some(r##"Sha1 FingerPrint."##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("get",
Some(r##"Retrieves a particular SSL certificate. Does not include the private key
(required for usage). The private key must be saved from the response to
initial creation."##),
"Details at http://byron.github.io/google-apis-rs/google_sql1_beta4_cli/ssl-certs_get",
vec![
(Some(r##"project"##),
None,
Some(r##"Project ID of the project that contains the instance."##),
Some(true),
Some(false)),
(Some(r##"instance"##),
None,
Some(r##"Cloud SQL instance ID. This does not include the project ID."##),
Some(true),
Some(false)),
(Some(r##"sha1-fingerprint"##),
None,
Some(r##"Sha1 FingerPrint."##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("insert",
Some(r##"Creates an SSL certificate and returns it along with the private key and
server certificate authority. The new certificate will not be usable until
the instance is restarted."##),
"Details at http://byron.github.io/google-apis-rs/google_sql1_beta4_cli/ssl-certs_insert",
vec![
(Some(r##"project"##),
None,
Some(r##"Project ID of the project that contains the instance."##),
Some(true),
Some(false)),
(Some(r##"instance"##),
None,
Some(r##"Cloud SQL instance ID. This does not include the project ID."##),
Some(true),
Some(false)),
(Some(r##"kv"##),
Some(r##"r"##),
Some(r##"Set various fields of the request structure, matching the key=value form"##),
Some(true),
Some(true)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("list",
Some(r##"Lists all of the current SSL certificates for the instance."##),
"Details at http://byron.github.io/google-apis-rs/google_sql1_beta4_cli/ssl-certs_list",
vec![
(Some(r##"project"##),
None,
Some(r##"Project ID of the project that contains the instance."##),
Some(true),
Some(false)),
(Some(r##"instance"##),
None,
Some(r##"Cloud SQL instance ID. This does not include the project ID."##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
]),
("tiers", "methods: 'list'", vec![
("list",
Some(r##"Lists all available machine types (tiers) for Cloud SQL, for example,
db-n1-standard-1. For related information, see <a
href="/sql/pricing">Pricing</a>."##),
"Details at http://byron.github.io/google-apis-rs/google_sql1_beta4_cli/tiers_list",
vec![
(Some(r##"project"##),
None,
Some(r##"Project ID of the project for which to list tiers."##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
]),
("users", "methods: 'delete', 'insert', 'list' and 'update'", vec![
("delete",
Some(r##"Deletes a user from a Cloud SQL instance."##),
"Details at http://byron.github.io/google-apis-rs/google_sql1_beta4_cli/users_delete",
vec![
(Some(r##"project"##),
None,
Some(r##"Project ID of the project that contains the instance."##),
Some(true),
Some(false)),
(Some(r##"instance"##),
None,
Some(r##"Database instance ID. This does not include the project ID."##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("insert",
Some(r##"Creates a new user in a Cloud SQL instance."##),
"Details at http://byron.github.io/google-apis-rs/google_sql1_beta4_cli/users_insert",
vec![
(Some(r##"project"##),
None,
Some(r##"Project ID of the project that contains the instance."##),
Some(true),
Some(false)),
(Some(r##"instance"##),
None,
Some(r##"Database instance ID. This does not include the project ID."##),
Some(true),
Some(false)),
(Some(r##"kv"##),
Some(r##"r"##),
Some(r##"Set various fields of the request structure, matching the key=value form"##),
Some(true),
Some(true)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("list",
Some(r##"Lists users in the specified Cloud SQL instance."##),
"Details at http://byron.github.io/google-apis-rs/google_sql1_beta4_cli/users_list",
vec![
(Some(r##"project"##),
None,
Some(r##"Project ID of the project that contains the instance."##),
Some(true),
Some(false)),
(Some(r##"instance"##),
None,
Some(r##"Database instance ID. This does not include the project ID."##),
Some(true),
Some(false)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
("update",
Some(r##"Updates an existing user in a Cloud SQL instance."##),
"Details at http://byron.github.io/google-apis-rs/google_sql1_beta4_cli/users_update",
vec![
(Some(r##"project"##),
None,
Some(r##"Project ID of the project that contains the instance."##),
Some(true),
Some(false)),
(Some(r##"instance"##),
None,
Some(r##"Database instance ID. This does not include the project ID."##),
Some(true),
Some(false)),
(Some(r##"kv"##),
Some(r##"r"##),
Some(r##"Set various fields of the request structure, matching the key=value form"##),
Some(true),
Some(true)),
(Some(r##"v"##),
Some(r##"p"##),
Some(r##"Set various optional parameters, matching the key=value form"##),
Some(false),
Some(true)),
(Some(r##"out"##),
Some(r##"o"##),
Some(r##"Specify the file into which to write the program's output"##),
Some(false),
Some(false)),
]),
]),
];
let mut app = App::new("sql1-beta4")
.author("Sebastian Thiel <[email protected]>")
.version("1.0.14+20200331")
.about("API for Cloud SQL database instance management")
.after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_sql1_beta4_cli")
.arg(Arg::with_name("url")
.long("scope")
.help("Specify the authentication a method should be executed in. Each scope requires the user to grant this application permission to use it.If unset, it defaults to the shortest scope url for a particular method.")
.multiple(true)
.takes_value(true))
.arg(Arg::with_name("folder")
.long("config-dir")
.help("A directory into which we will store our persistent data. Defaults to a user-writable directory that we will create during the first invocation.[default: ~/.google-service-cli")
.multiple(false)
.takes_value(true))
.arg(Arg::with_name("debug")
.long("debug")
.help("Output all server communication to standard error. `tx` and `rx` are placed into the same stream.")
.multiple(false)
.takes_value(false))
.arg(Arg::with_name("debug-auth")
.long("debug-auth")
.help("Output all communication related to authentication to standard error. `tx` and `rx` are placed into the same stream.")
.multiple(false)
.takes_value(false));
for &(main_command_name, about, ref subcommands) in arg_data.iter() {
let mut mcmd = SubCommand::with_name(main_command_name).about(about);
for &(sub_command_name, ref desc, url_info, ref args) in subcommands {
let mut scmd = SubCommand::with_name(sub_command_name);
if let &Some(desc) = desc {
scmd = scmd.about(desc);
}
scmd = scmd.after_help(url_info);
for &(ref arg_name, ref flag, ref desc, ref required, ref multi) in args {
let arg_name_str =
match (arg_name, flag) {
(&Some(an), _ ) => an,
(_ , &Some(f)) => f,
_ => unreachable!(),
};
let mut arg = Arg::with_name(arg_name_str)
.empty_values(false);
if let &Some(short_flag) = flag {
arg = arg.short(short_flag);
}
if let &Some(desc) = desc {
arg = arg.help(desc);
}
if arg_name.is_some() && flag.is_some() {
arg = arg.takes_value(true);
}
if let &Some(required) = required {
arg = arg.required(required);
}
if let &Some(multi) = multi {
arg = arg.multiple(multi);
}
scmd = scmd.arg(arg);
}
mcmd = mcmd.subcommand(scmd);
}
app = app.subcommand(mcmd);
}
let matches = app.get_matches();
let debug = matches.is_present("debug");
match Engine::new(matches) {
Err(err) => {
exit_status = err.exit_code;
writeln!(io::stderr(), "{}", err).ok();
},
Ok(engine) => {
if let Err(doit_err) = engine.doit() {
exit_status = 1;
match doit_err {
DoitError::IoError(path, err) => {
writeln!(io::stderr(), "Failed to open output file '{}': {}", path, err).ok();
},
DoitError::ApiError(err) => {
if debug {
writeln!(io::stderr(), "{:#?}", err).ok();
} else {
writeln!(io::stderr(), "{}", err).ok();
}
}
}
}
}
}
std::process::exit(exit_status);
}
|
{
let mut call = self.hub.backup_runs().delete(opt.value_of("project").unwrap_or(""), opt.value_of("instance").unwrap_or(""), opt.value_of("id").unwrap_or(""));
for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
let (key, value) = parse_kv_arg(&*parg, err, false);
match key {
_ => {
let mut found = false;
for param in &self.gp {
if key == *param {
found = true;
call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset"));
break;
}
}
if !found {
err.issues.push(CLIError::UnknownParameter(key.to_string(),
{let mut v = Vec::new();
v.extend(self.gp.iter().map(|v|*v));
v } ));
}
}
}
}
let protocol = CallType::Standard;
if dry_run {
Ok(())
} else {
assert!(err.issues.len() == 0);
for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() {
call = call.add_scope(scope);
}
let mut ostream = match writer_from_opts(opt.value_of("out")) {
Ok(mut f) => f,
Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)),
};
match match protocol {
CallType::Standard => call.doit(),
_ => unreachable!()
} {
Err(api_err) => Err(DoitError::ApiError(api_err)),
Ok((mut response, output_schema)) => {
let mut value = json::value::to_value(&output_schema).expect("serde to work");
remove_json_null_values(&mut value);
json::to_writer_pretty(&mut ostream, &value).unwrap();
ostream.flush().unwrap();
Ok(())
}
}
}
}
|
cluster_test.go
|
package config_test
import (
"testing"
"github.com/derailed/k9s/internal/config"
m "github.com/petergtz/pegomock"
"github.com/stretchr/testify/assert"
)
func TestClusterValidate(t *testing.T)
|
func TestClusterValidateEmpty(t *testing.T) {
setup(t)
ksMock := NewMockKubeSettings()
m.When(ksMock.NamespaceNames()).ThenReturn([]string{"ns1", "ns2", "default"}, nil)
var c config.Cluster
c.Validate(ksMock)
assert.Equal(t, "po", c.View.Active)
assert.Equal(t, "default", c.Namespace.Active)
assert.Equal(t, 1, len(c.Namespace.Favorites))
assert.Equal(t, []string{"default"}, c.Namespace.Favorites)
}
|
{
setup(t)
ksMock := NewMockKubeSettings()
m.When(ksMock.NamespaceNames()).ThenReturn([]string{"ns1", "ns2", "default"}, nil)
c := config.NewCluster()
c.Validate(ksMock)
assert.Equal(t, "po", c.View.Active)
assert.Equal(t, "default", c.Namespace.Active)
assert.Equal(t, 1, len(c.Namespace.Favorites))
assert.Equal(t, []string{"default"}, c.Namespace.Favorites)
}
|
ui-component.ts
|
/*
Copyright (c) 2017-2020 Xiamen Yaji Software Co., Ltd.
http://www.cocos.com
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated engine source code (the "Software"), a limited,
worldwide, royalty-free, non-assignable, revocable and non-exclusive license
to use Cocos Creator solely to develop games on your target platforms. You shall
not use Cocos Creator software for developing other software or tools that's
used for developing games. You are not granted to publish, distribute,
sublicense, and/or sell copies of Cocos Creator.
|
The software or tools in this License Agreement are licensed, not sold.
Xiamen Yaji Software Co., Ltd. reserves all rights not expressly granted to you.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
/**
* @packageDocumentation
* @module ui
*/
import { ccclass, disallowMultiple, executeInEditMode, executionOrder, requireComponent } from 'cc.decorator';
import { IBatcher } from '../renderer/i-batcher';
import { Component } from '../../core/components/component';
import { UITransform } from './ui-transform';
import { Node } from '../../core/scene-graph';
import { Stage } from '../renderer/stencil-manager';
/**
* @en Legacy 2D base class for rendering component, please use [[Renderable2D]] instead.
* This component will setup [[NodeUIProperties.uiComp]] in its owner [[Node]]
* @zh 旧的 2D 渲染组件基类,请使用 [[Renderable2D]] 替代。
* 这个组件会设置 [[Node]] 上的 [[NodeUIProperties.uiComp]]。
* @deprecated since v3.4.1
*/
@ccclass('cc.UIComponent')
@requireComponent(UITransform)
@executionOrder(110)
@disallowMultiple
@executeInEditMode
export class UIComponent extends Component {
protected _lastParent: Node | null = null;
public __preload () {
// @ts-expect-error temporary, UIComponent should be removed
this.node._uiProps.uiComp = this;
}
public onEnable () {
}
public onDisable () {
}
public onDestroy () {
// @ts-expect-error temporary, UIComponent should be removed
if (this.node._uiProps.uiComp === this) {
// @ts-expect-error temporary, UIComponent should be removed
this.node._uiProps.uiComp = null;
}
}
/**
* @en Render data submission procedure, it update and assemble the render data to 2D data buffers before all children submission process.
* Usually called each frame when the ui flow assemble all render data to geometry buffers.
* Don't call it unless you know what you are doing.
* @zh 渲染数据组装程序,这个方法会在所有子节点数据组装之前更新并组装当前组件的渲染数据到 UI 的顶点数据缓冲区中。
* 一般在 UI 渲染流程中调用,用于组装所有的渲染数据到顶点数据缓冲区。
* 注意:不要手动调用该函数,除非你理解整个流程。
*/
public updateAssembler (render: IBatcher) {
}
/**
* @en Post render data submission procedure, it's executed after assembler updated for all children.
* It may assemble some extra render data to the geometry buffers, or it may only change some render states.
* Don't call it unless you know what you are doing.
* @zh 后置渲染数据组装程序,它会在所有子节点的渲染数据组装完成后被调用。
* 它可能会组装额外的渲染数据到顶点数据缓冲区,也可能只是重置一些渲染状态。
* 注意:不要手动调用该函数,除非你理解整个流程。
*/
public postUpdateAssembler (render: IBatcher) {
}
public markForUpdateRenderData (enable = true) {
}
public stencilStage : Stage = Stage.DISABLED;
public setNodeDirty () {
}
public setTextureDirty () {
}
}
| |
2-FetchAPI.js
|
/*Title: Fetch API
Description: Fetch API - JavaScript API for making HTTP requests
Author: Md. Samiur Rahman (Mukul)
Website: http://www.SamiurRahmanMukul.epizy.com
Github: https://www.github.com/SamiurRahmanMukul
Email: [email protected] [FAKE EMAIL]
Date: 06/12/2021 */
/* // ? Fetch API - Fetching data from a server
fetch() has replaced XMLHttpRequest
fetch() - global method for making HTTP Request
2 ways to call - then, async await
+ fetch() is easy to use compare to XMLHttpRequest
+ fetch() returns a promise
- returned promise can only handle network error
- does not support all the older browser */
// ? method for making HTTP Request
const makeRequest = async (url, config) => {
const res = await fetch(url, config);
if (!res.ok) {
const message = `Error : ${res.status}`;
throw new Error(message);
}
const data = await res.json();
return data;
};
// ? make a getData() function to get data from the API server
const getData = () => {
makeRequest("https://jsonplaceholder.typicode.com/posts")
.then((res) => console.log(res))
.catch((err) => console.log(err));
};
getData();
// ? make a sendData() function to send data to the API server
const sendData = () => {
makeRequest("https://jsonplaceholder.typicode.com/posts", {
method: "POST",
body: JSON.stringify({
title: "foo",
body: "bar",
userId: 1,
}),
headers: {
"Content-type": "application/json; charset=UTF-8",
},
})
.then((res) => console.log(res))
.catch((err) => console.log(err));
};
// sendData();
// ? make a updateData() function to update data to the API server
const updateData = () => {
makeRequest("https://jsonplaceholder.typicode.com/posts/1", {
method: "PATCH",
body: JSON.stringify({
title: "Hello World",
|
"Content-type": "application/json; charset=UTF-8",
},
})
.then((res) => console.log(res))
.catch((err) => console.log(err));
};
// updateData();
// ? make a deleteData() function to delete data to the API server
const deleteData = () => {
makeRequest("https://jsonplaceholder.typicode.com/posts/1", {
method: "DELETE",
})
.then((res) => console.log(res))
.catch((err) => console.log(err));
};
// deleteData();
|
}),
headers: {
|
schemas_versions.go
|
/*
* Twilio - Events
*
* This is the public Twilio REST API.
*
* API version: 1.24.0
* Contact: [email protected]
*/
// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT.
package openapi
import (
"encoding/json"
"fmt"
"net/url"
"strings"
"github.com/patnunes/twilio-go/client"
)
// Fetch a specific schema and version.
func (c *ApiService) FetchSchemaVersion(Id string, SchemaVersion int) (*EventsV1SchemaVersion, error) {
path := "/v1/Schemas/{Id}/Versions/{SchemaVersion}"
path = strings.Replace(path, "{"+"Id"+"}", Id, -1)
path = strings.Replace(path, "{"+"SchemaVersion"+"}", fmt.Sprint(SchemaVersion), -1)
data := url.Values{}
headers := make(map[string]interface{})
resp, err := c.requestHandler.Get(c.baseURL+path, data, headers)
if err != nil {
return nil, err
}
defer resp.Body.Close()
ps := &EventsV1SchemaVersion{}
if err := json.NewDecoder(resp.Body).Decode(ps); err != nil {
return nil, err
}
return ps, err
}
// Optional parameters for the method 'ListSchemaVersion'
type ListSchemaVersionParams struct {
// How many resources to return in each list page. The default is 50, and the maximum is 1000.
PageSize *int `json:"PageSize,omitempty"`
// Max number of records to return.
Limit *int `json:"limit,omitempty"`
}
func (params *ListSchemaVersionParams) SetPageSize(PageSize int) *ListSchemaVersionParams {
params.PageSize = &PageSize
return params
}
func (params *ListSchemaVersionParams) SetLimit(Limit int) *ListSchemaVersionParams {
params.Limit = &Limit
return params
}
// Retrieve a single page of SchemaVersion records from the API. Request is executed immediately.
func (c *ApiService) PageSchemaVersion(Id string, params *ListSchemaVersionParams, pageToken, pageNumber string) (*ListSchemaVersionResponse, error) {
path := "/v1/Schemas/{Id}/Versions"
path = strings.Replace(path, "{"+"Id"+"}", Id, -1)
data := url.Values{}
headers := make(map[string]interface{})
if params != nil && params.PageSize != nil {
data.Set("PageSize", fmt.Sprint(*params.PageSize))
}
if pageToken != "" {
data.Set("PageToken", pageToken)
}
if pageNumber != "" {
data.Set("Page", pageNumber)
}
resp, err := c.requestHandler.Get(c.baseURL+path, data, headers)
if err != nil {
return nil, err
}
defer resp.Body.Close()
ps := &ListSchemaVersionResponse{}
if err := json.NewDecoder(resp.Body).Decode(ps); err != nil {
return nil, err
}
return ps, err
}
// Lists SchemaVersion records from the API as a list. Unlike stream, this operation is eager and loads 'limit' records into memory before returning.
func (c *ApiService) ListSchemaVersion(Id string, params *ListSchemaVersionParams) ([]EventsV1SchemaVersion, error) {
if params == nil {
params = &ListSchemaVersionParams{}
}
params.SetPageSize(client.ReadLimits(params.PageSize, params.Limit))
response, err := c.PageSchemaVersion(Id, params, "", "")
if err != nil {
return nil, err
}
curRecord := 0
var records []EventsV1SchemaVersion
for response != nil {
|
var record interface{}
if record, err = client.GetNext(c.baseURL, response, &curRecord, params.Limit, c.getNextListSchemaVersionResponse); record == nil || err != nil {
return records, err
}
response = record.(*ListSchemaVersionResponse)
}
return records, err
}
// Streams SchemaVersion records from the API as a channel stream. This operation lazily loads records as efficiently as possible until the limit is reached.
func (c *ApiService) StreamSchemaVersion(Id string, params *ListSchemaVersionParams) (chan EventsV1SchemaVersion, error) {
if params == nil {
params = &ListSchemaVersionParams{}
}
params.SetPageSize(client.ReadLimits(params.PageSize, params.Limit))
response, err := c.PageSchemaVersion(Id, params, "", "")
if err != nil {
return nil, err
}
curRecord := 0
//set buffer size of the channel to 1
channel := make(chan EventsV1SchemaVersion, 1)
go func() {
for response != nil {
for item := range response.SchemaVersions {
channel <- response.SchemaVersions[item]
}
var record interface{}
if record, err = client.GetNext(c.baseURL, response, &curRecord, params.Limit, c.getNextListSchemaVersionResponse); record == nil || err != nil {
close(channel)
return
}
response = record.(*ListSchemaVersionResponse)
}
close(channel)
}()
return channel, err
}
func (c *ApiService) getNextListSchemaVersionResponse(nextPageUrl string) (interface{}, error) {
if nextPageUrl == "" {
return nil, nil
}
resp, err := c.requestHandler.Get(nextPageUrl, nil, nil)
if err != nil {
return nil, err
}
defer resp.Body.Close()
ps := &ListSchemaVersionResponse{}
if err := json.NewDecoder(resp.Body).Decode(ps); err != nil {
return nil, err
}
return ps, nil
}
|
records = append(records, response.SchemaVersions...)
|
integ_test.go
|
// Code generated by private/model/cli/gen-api/main.go. DO NOT EDIT.
// +build go1.15,integration
package neptune_test
import (
"context"
"testing"
"time"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/awserr"
"github.com/aws/aws-sdk-go/aws/request"
"github.com/aws/aws-sdk-go/awstesting/integration"
"github.com/aws/aws-sdk-go/service/neptune"
)
var _ aws.Config
var _ awserr.Error
var _ request.Request
|
ctx, cancelFn := context.WithTimeout(context.Background(), 5*time.Second)
defer cancelFn()
sess := integration.SessionWithDefaultRegion("us-west-2")
svc := neptune.New(sess)
params := &neptune.DescribeDBEngineVersionsInput{}
_, err := svc.DescribeDBEngineVersionsWithContext(ctx, params, func(r *request.Request) {
r.Handlers.Validate.RemoveByName("core.ValidateParametersHandler")
})
if err != nil {
t.Errorf("expect no error, got %v", err)
}
}
func TestInteg_01_DescribeDBInstances(t *testing.T) {
ctx, cancelFn := context.WithTimeout(context.Background(), 5*time.Second)
defer cancelFn()
sess := integration.SessionWithDefaultRegion("us-west-2")
svc := neptune.New(sess)
params := &neptune.DescribeDBInstancesInput{
DBInstanceIdentifier: aws.String("fake-id"),
}
_, err := svc.DescribeDBInstancesWithContext(ctx, params, func(r *request.Request) {
r.Handlers.Validate.RemoveByName("core.ValidateParametersHandler")
})
if err == nil {
t.Fatalf("expect request to fail")
}
aerr, ok := err.(awserr.RequestFailure)
if !ok {
t.Fatalf("expect awserr, was %T", err)
}
if len(aerr.Code()) == 0 {
t.Errorf("expect non-empty error code")
}
if len(aerr.Message()) == 0 {
t.Errorf("expect non-empty error message")
}
if v := aerr.Code(); v == request.ErrCodeSerialization {
t.Errorf("expect API error code got serialization failure")
}
}
|
func TestInteg_00_DescribeDBEngineVersions(t *testing.T) {
|
metadata.ts
|
import * as ts from 'typescript';
import {RawSourceMap} from 'source-map';
export interface CodeWithSourceMap {
code: string;
source?: string;
map?: RawSourceMap;
}
export interface TemplateMetadata {
template: CodeWithSourceMap;
node: ts.Node;
url: string;
}
export interface StyleMetadata {
style: CodeWithSourceMap;
node: ts.Node;
url: string;
}
export interface StylesMetadata {
[index: number]: StyleMetadata;
length: number;
push(e: StyleMetadata): number;
}
export class DirectiveMetadata {
selector: string;
|
controller: ts.ClassDeclaration;
decorator: ts.Decorator;
}
export class ComponentMetadata extends DirectiveMetadata {
template: TemplateMetadata;
styles: StylesMetadata;
}
| |
ar.js
|
/*
Copyright (c) 2003-2017, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or https://ckeditor.com/legal/ckeditor-oss-license
*/
CKEDITOR.plugins.setLang( 'basicstyles', 'ar', {
bold: 'عريض',
|
superscript: 'مرتفع',
underline: 'تسطير'
} );
|
italic: 'مائل',
strike: 'يتوسطه خط',
subscript: 'منخفض',
|
wrtiming3.rs
|
#[doc = "Reader of register WRTIMING3"]
pub type R = crate::R<u32, super::WRTIMING3>;
#[doc = "Writer for register WRTIMING3"]
pub type W = crate::W<u32, super::WRTIMING3>;
#[doc = "Register WRTIMING3 `reset()`'s with value 0x0007_7f07"]
impl crate::ResetValue for super::WRTIMING3 {
type Type = u32;
#[inline(always)]
fn reset_value() -> Self::Type {
0x0007_7f07
}
}
#[doc = "Reader of field `WRSETUP`"]
pub type WRSETUP_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `WRSETUP`"]
pub struct WRSETUP_W<'a> {
w: &'a mut W,
}
impl<'a> WRSETUP_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !0x07) | ((value as u32) & 0x07);
self.w
}
}
#[doc = "Reader of field `WRSTRB`"]
pub type WRSTRB_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `WRSTRB`"]
pub struct
|
<'a> {
w: &'a mut W,
}
impl<'a> WRSTRB_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x7f << 8)) | (((value as u32) & 0x7f) << 8);
self.w
}
}
#[doc = "Reader of field `WRHOLD`"]
pub type WRHOLD_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `WRHOLD`"]
pub struct WRHOLD_W<'a> {
w: &'a mut W,
}
impl<'a> WRHOLD_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x07 << 16)) | (((value as u32) & 0x07) << 16);
self.w
}
}
#[doc = "Reader of field `HALFWE`"]
pub type HALFWE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `HALFWE`"]
pub struct HALFWE_W<'a> {
w: &'a mut W,
}
impl<'a> HALFWE_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 28)) | (((value as u32) & 0x01) << 28);
self.w
}
}
#[doc = "Reader of field `WBUFDIS`"]
pub type WBUFDIS_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `WBUFDIS`"]
pub struct WBUFDIS_W<'a> {
w: &'a mut W,
}
impl<'a> WBUFDIS_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 29)) | (((value as u32) & 0x01) << 29);
self.w
}
}
impl R {
#[doc = "Bits 0:2 - Write Setup Time"]
#[inline(always)]
pub fn wrsetup(&self) -> WRSETUP_R {
WRSETUP_R::new((self.bits & 0x07) as u8)
}
#[doc = "Bits 8:14 - Write Strobe Time"]
#[inline(always)]
pub fn wrstrb(&self) -> WRSTRB_R {
WRSTRB_R::new(((self.bits >> 8) & 0x7f) as u8)
}
#[doc = "Bits 16:18 - Write Hold Time"]
#[inline(always)]
pub fn wrhold(&self) -> WRHOLD_R {
WRHOLD_R::new(((self.bits >> 16) & 0x07) as u8)
}
#[doc = "Bit 28 - Half Cycle WEn Strobe Duration Enable"]
#[inline(always)]
pub fn halfwe(&self) -> HALFWE_R {
HALFWE_R::new(((self.bits >> 28) & 0x01) != 0)
}
#[doc = "Bit 29 - Write Buffer Disable"]
#[inline(always)]
pub fn wbufdis(&self) -> WBUFDIS_R {
WBUFDIS_R::new(((self.bits >> 29) & 0x01) != 0)
}
}
impl W {
#[doc = "Bits 0:2 - Write Setup Time"]
#[inline(always)]
pub fn wrsetup(&mut self) -> WRSETUP_W {
WRSETUP_W { w: self }
}
#[doc = "Bits 8:14 - Write Strobe Time"]
#[inline(always)]
pub fn wrstrb(&mut self) -> WRSTRB_W {
WRSTRB_W { w: self }
}
#[doc = "Bits 16:18 - Write Hold Time"]
#[inline(always)]
pub fn wrhold(&mut self) -> WRHOLD_W {
WRHOLD_W { w: self }
}
#[doc = "Bit 28 - Half Cycle WEn Strobe Duration Enable"]
#[inline(always)]
pub fn halfwe(&mut self) -> HALFWE_W {
HALFWE_W { w: self }
}
#[doc = "Bit 29 - Write Buffer Disable"]
#[inline(always)]
pub fn wbufdis(&mut self) -> WBUFDIS_W {
WBUFDIS_W { w: self }
}
}
|
WRSTRB_W
|
packet.py
|
from linptech.crc8 import crc8
import logging
class Packet(object):
'''
Base class for Packet.
Mainly used for for packet generation and
Packet.parse_msg(buf) for parsing message.
parse_msg() returns subclass, if one is defined for the data type.
'''
def __init__(self, data=None, optional="00"*7):
if data is None:
logging.warning('Packet.data is None')
else:
self.data = data
if optional is None:
logging.info('Packet.optional is None.')
else:
self.optional = optional
@staticmethod
def check(packet):
|
@staticmethod
def parse(packet):
"""
parse an packet to data and optional for receive
"""
if Packet.check(packet):
try:
data_len=int(packet[4:6],16)
data=packet[12:12+data_len*2]
optional=packet[12+data_len*2:26+data_len*2]
return data,optional
except Exception as e:
logging.error("parse packet wrong:%s",e)
return
else :
logging.error("packet is invalid")
return
@staticmethod
def create(data=None, optional="00"*7):
"""
Creates an packet ready for sending.
Uses data and optional.
"""
try:
data_len = "{0:>02}".format(hex(int(len(data)/2))[2:])
m1 = "00"+data_len+"0701"
m2 = data+optional
packet = "55"+m1+crc8(m1)+m2+crc8(m2)
return packet
except Exception as e:
logging.error("create packet wrong:%s",e)
return
if __name__ == "__main__":
logging.getLogger().setLevel(logging.INFO)
data="1f8000004581020101"
Packet.create(data)
|
"""
check packet with crc
"""
if packet.startswith("550") and \
crc8(packet[2:10])==packet[10:12] and \
crc8(packet[12:-2])==packet[-2:]:
return True
else:
return False
|
FalseNegative.spec.tsx
|
import React from 'react';
import userEvent from '@testing-library/user-event';
import { render, screen } from '@testing-library/react';
// Components
import { FalseNegative } from '.';
test('should increade, decrease, reset and check the counter value - by text', () => {
const { debug } = render(<FalseNegative />);
debug();
expect(screen.getByText('0')).not.toBeNull();
userEvent.click(screen.getByText('-'));
userEvent.click(screen.getByText('-'));
expect(screen.getByText('-2')).not.toBeNull();
userEvent.click(screen.getByText('+'));
expect(screen.getByText('-1')).not.toBeNull();
userEvent.click(screen.getByText('Reset'));
expect(screen.getByText('0')).not.toBeNull();
});
test('should increade, decrease, reset and check the counter value - by tagname', () => {
const { debug, container } = render(<FalseNegative />);
debug();
expect(container.querySelector('span')?.textContent).toBe('0');
userEvent.click(container.querySelectorAll('button')[1]);
userEvent.click(container.querySelectorAll('button')[1]);
expect(container.querySelector('span')?.textContent).toBe('-2');
userEvent.click(container.querySelectorAll('button')[2]);
expect(container.querySelector('span')?.textContent).toBe('-1');
userEvent.click(container.querySelectorAll('button')[0]);
expect(container.querySelector('span')?.textContent).toBe('0');
});
test('should increade, decrease, reset and check the counter value - by aria label', () => {
const { debug } = render(<FalseNegative />);
debug();
expect(screen.getByLabelText('Counter').textContent).toBe('0');
|
userEvent.click(screen.getByLabelText('Decrease'));
expect(screen.getByLabelText('Counter').textContent).toBe('-2');
userEvent.click(screen.getByLabelText('Increase'));
expect(screen.getByLabelText('Counter').textContent).toBe('-1');
userEvent.click(screen.getByLabelText('Reset'));
expect(screen.getByLabelText('Counter').textContent).toBe('0');
});
|
userEvent.click(screen.getByLabelText('Decrease'));
|
VerticalDivider.js
|
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
|
var React = require("react");
var VerticalDivider_classNames_1 = require("./VerticalDivider.classNames");
var Styling_1 = require("../../Styling");
exports.VerticalDivider = function (props) {
var theme = Styling_1.getTheme();
var classNames = props.getClassNames ? props.getClassNames(theme) : VerticalDivider_classNames_1.getDividerClassNames(theme);
return (React.createElement("span", { className: classNames.wrapper },
React.createElement("span", { className: classNames.divider })));
};
//# sourceMappingURL=VerticalDivider.js.map
| |
ui-ionic.config.ts
|
import { ConfigOption } from '@ruslanguns/core';
import {
FormlyFieldInput,
FormlyFieldCheckbox,
FormlyFieldDatetime,
FormlyFieldRadio,
FormlyFieldToggle,
FormlyFieldSelect,
FormlyFieldRange,
FormlyFieldTextArea,
} from './types/types';
import { FormlyWrapperFormField } from './wrappers/wrappers';
export const FIELD_TYPE_COMPONENTS = [
// types
FormlyFieldInput,
FormlyFieldCheckbox,
FormlyFieldDatetime,
FormlyFieldRadio,
FormlyFieldToggle,
FormlyFieldSelect,
FormlyFieldRange,
FormlyFieldTextArea,
|
// wrappers
FormlyWrapperFormField,
];
export const IONIC_FORMLY_CONFIG: ConfigOption = {
types: [
{
name: 'input',
component: FormlyFieldInput,
wrappers: ['form-field'],
},
{
name: 'checkbox',
component: FormlyFieldCheckbox,
wrappers: ['form-field'],
},
{
name: 'datetime',
component: FormlyFieldDatetime,
wrappers: ['form-field'],
},
{
name: 'radio',
component: FormlyFieldRadio,
wrappers: [],
},
{
name: 'toggle',
component: FormlyFieldToggle,
wrappers: ['form-field'],
},
{
name: 'select',
component: FormlyFieldSelect,
wrappers: ['form-field'],
},
{
name: 'range',
component: FormlyFieldRange,
wrappers: ['form-field'],
},
{
name: 'textarea',
component: FormlyFieldTextArea,
wrappers: ['form-field'],
},
],
wrappers: [
{ name: 'form-field', component: FormlyWrapperFormField },
],
};
| |
bmi-calculator.js
|
function calculateBMI(weight, height) {
// Calculates the BMI through the following formulaL:
|
// BMI=Weight(kg)/height^2(m)
bmi = weight / Math.pow(height, 2);
rounded_bmi = Math.round(bmi);
return rounded_bmi;
}
bmi=calculateBMI(60, 1.6)
console.log(bmi);
| |
ex78.py
|
from reliability.Reliability_testing import one_sample_proportion
result = one_sample_proportion(trials=30, successes=29)
print(result)
|
(0.8278305443665873, 0.9991564290733695)
'''
|
'''
|
right-sidebar.component.ts
|
import { Pipe, PipeTransform, Output, EventEmitter } from '@angular/core';
import { Component, OnInit, HostListener } from '@angular/core';
import { Input } from "@angular/core";
import { FormGroup } from "@angular/forms";
import { RxFormBuilder } from "@rxweb/reactive-form-validators";
import { FeedbackModel } from "src/app/components/shared/right-sidebar/domain/feedback.model";
import { Http } from "@angular/http";
import { RequestOptionsArgs } from "@angular/http";
import { RequestOptions } from "@angular/http";
import { HttpClient } from "@angular/common/http";
import { HttpHeaders } from "@angular/common/http";
import { Router, NavigationEnd } from '@angular/router';
import { DomSanitizer, SafeResourceUrl } from '@angular/platform-browser';
@Component({
selector: 'app-right-sidebar',
templateUrl: './right-sidebar.component.html',
})
export class
|
implements OnInit {
public feedbackForm: FormGroup
sticky: boolean = false;
gitAsideUrl: SafeResourceUrl;
validationName: string;
isGitEditUrlShow: boolean = true;
mainType: string;
toogleOpen: boolean = true;
showExample: boolean = true;
httpOptions = { headers: new HttpHeaders({ 'Content-Type': 'application/json' }) };
constructor(
private http: HttpClient, private router: Router, private formBuilder: RxFormBuilder, private sanitizer: DomSanitizer
) {
this.gitAsideUrl = sanitizer.bypassSecurityTrustResourceUrl("https://gitter.im/rxweb-project/rxweb/~embed");
}
@Input('sidebarLinks') sidebarLinks: any = {};
showComponent: boolean = false;
contributorList: any = [];
gitEditUrl: string = "https://github.com/rxweb/rxweb.io/edit/master/";
@HostListener('window:scroll', ['$event'])
handleScroll() {
const windowScroll = document.documentElement.scrollTop;
if (windowScroll >= 50) {
this.sticky = true;
} else {
this.sticky = false;
}
}
ngOnInit(): void {
if (this.router.url.includes('whats-new') || this.router.url.includes('whats-next') || this.router.url.includes('why') || this.router.url.includes('getting-started') || this.router.url.includes('reactive-form-config'))
this.isGitEditUrlShow = false;
var splitedArray = this.router.url.split("/");
this.mainType = splitedArray[1];
this.validationName = splitedArray[2];
if (splitedArray.length > 0 && splitedArray[1]) {
switch (splitedArray[1]) {
case "decorators":
this.gitEditUrl += "docs/reactive-form-validators/decorators/" + splitedArray[2] + ".md"
break;
case "form-validations":
this.gitEditUrl += "docs/reactive-form-validators/validation-decorators/" + splitedArray[2] + ".md"
break;
case "api":
this.gitEditUrl += "docs/reactive-form-validators/api/" + splitedArray[2] + ".md"
break;
case "community":
this.gitEditUrl += "docs/community/" + splitedArray[2] + ".md"
break;
case "sanitization":
this.gitEditUrl += "docs/sanitization/" + splitedArray[2] + ".md"
break
case "how-to":
this.gitEditUrl += "docs/how-to/" + splitedArray[2] + ".md"
break
case "reactive-dynamic-forms":
this.gitEditUrl += "docs/dynamic-forms/" +
splitedArray[3] + ".md"
case "rx-web-core":
if(splitedArray[4])
this.gitEditUrl += "docs/rx-web-core/" + splitedArray[2] +"/" + splitedArray[3] + "/" + splitedArray[4] + ".md"
else
this.gitEditUrl += "docs/rx-web-core/" + splitedArray[2] +"/" + splitedArray[3] + ".md"
}
}
else if (splitedArray.length > 0 && splitedArray[0] == "changelog") {
this.gitEditUrl += "CHANGELOG.md"
}
if (this.mainType != "community") {
// this.sidebarLinks.splice(0, 1);
}
this.showComponent = true;
}
scrollTo(section) {
var node = document.querySelector('#' + section);
node.scrollIntoView(true);
var scrolledY = window.scrollY;
if (scrolledY) {
window.scroll(0, scrolledY - 62);
}
return false;
}
routeExample() {
this.toogleOpen = !this.toogleOpen;
this.showExample = !this.showExample;
var splitedArray = this.router.url.split('/');
if (splitedArray[4])
this.router.navigate(['/', splitedArray[1], splitedArray[2], splitedArray[3], splitedArray[4]], { queryParams: { showExample: this.showExample }, replaceUrl: false });
else
this.router.navigate(['/', splitedArray[1], splitedArray[2], splitedArray[3]], { queryParams: { showExample: this.showExample }, replaceUrl: false });
}
}
|
RightSideBarComponent
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.