file_name (string, 3-137 chars) | prefix (string, 0-918k chars) | suffix (string, 0-962k chars) | middle (string, 0-812k chars)
---|---|---|---|
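Each row below pairs a `file_name` with three code fragments. As a minimal sketch of how a row is assumed to reassemble into its original file, the usual fill-in-the-middle convention concatenates `prefix + middle + suffix`; the `reassemble` helper and the sample row here are hypothetical, for illustration only:

```python
def reassemble(row: dict) -> str:
    """Rebuild the original file text from one dataset row."""
    # Fill-in-the-middle convention: the masked `middle` span slots
    # between the `prefix` and `suffix` fragments.
    return row["prefix"] + row["middle"] + row["suffix"]

# Hypothetical row shaped like the table below.
row = {
    "file_name": "example.py",
    "prefix": "def add(a, b):\n    ",
    "middle": "return a + b",
    "suffix": "\n",
}
assert reassemble(row) == "def add(a, b):\n    return a + b\n"
```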
datasource.go | /*
Copyright 2018 The CDI Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Code generated by client-gen. DO NOT EDIT.
package v1beta1
import (
"context"
"time"
v1 "k8s.io/apimachinery/pkg/apis/meta/v1"
types "k8s.io/apimachinery/pkg/types"
watch "k8s.io/apimachinery/pkg/watch"
rest "k8s.io/client-go/rest"
v1beta1 "kubevirt.io/containerized-data-importer/pkg/apis/core/v1beta1"
scheme "kubevirt.io/containerized-data-importer/pkg/client/clientset/versioned/scheme"
)
// DataSourcesGetter has a method to return a DataSourceInterface.
// A group's client should implement this interface.
type DataSourcesGetter interface {
DataSources(namespace string) DataSourceInterface
}
// DataSourceInterface has methods to work with DataSource resources.
type DataSourceInterface interface {
Create(ctx context.Context, dataSource *v1beta1.DataSource, opts v1.CreateOptions) (*v1beta1.DataSource, error)
Update(ctx context.Context, dataSource *v1beta1.DataSource, opts v1.UpdateOptions) (*v1beta1.DataSource, error)
UpdateStatus(ctx context.Context, dataSource *v1beta1.DataSource, opts v1.UpdateOptions) (*v1beta1.DataSource, error)
Delete(ctx context.Context, name string, opts v1.DeleteOptions) error
DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error
Get(ctx context.Context, name string, opts v1.GetOptions) (*v1beta1.DataSource, error)
List(ctx context.Context, opts v1.ListOptions) (*v1beta1.DataSourceList, error)
Watch(ctx context.Context, opts v1.ListOptions) (watch.Interface, error)
Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1beta1.DataSource, err error)
DataSourceExpansion
}
// dataSources implements DataSourceInterface
type dataSources struct {
client rest.Interface
ns string
}
// newDataSources returns a DataSources
func newDataSources(c *CdiV1beta1Client, namespace string) *dataSources {
return &dataSources{
client: c.RESTClient(),
ns: namespace,
}
}
// Get takes name of the dataSource, and returns the corresponding dataSource object, and an error if there is any.
func (c *dataSources) Get(ctx context.Context, name string, options v1.GetOptions) (result *v1beta1.DataSource, err error) {
result = &v1beta1.DataSource{}
err = c.client.Get().
Namespace(c.ns).
Resource("datasources").
Name(name).
VersionedParams(&options, scheme.ParameterCodec).
Do(ctx).
Into(result)
return
}
// List takes label and field selectors, and returns the list of DataSources that match those selectors.
func (c *dataSources) List(ctx context.Context, opts v1.ListOptions) (result *v1beta1.DataSourceList, err error) {
var timeout time.Duration
if opts.TimeoutSeconds != nil {
timeout = time.Duration(*opts.TimeoutSeconds) * time.Second
}
result = &v1beta1.DataSourceList{}
err = c.client.Get().
Namespace(c.ns).
Resource("datasources").
VersionedParams(&opts, scheme.ParameterCodec).
Timeout(timeout).
Do(ctx).
Into(result)
return
}
// Watch returns a watch.Interface that watches the requested dataSources.
func (c *dataSources) Watch(ctx context.Context, opts v1.ListOptions) (watch.Interface, error) {
var timeout time.Duration
if opts.TimeoutSeconds != nil {
timeout = time.Duration(*opts.TimeoutSeconds) * time.Second
}
opts.Watch = true
return c.client.Get(). | Timeout(timeout).
Watch(ctx)
}
// Create takes the representation of a dataSource and creates it. Returns the server's representation of the dataSource, and an error, if there is any.
func (c *dataSources) Create(ctx context.Context, dataSource *v1beta1.DataSource, opts v1.CreateOptions) (result *v1beta1.DataSource, err error) {
result = &v1beta1.DataSource{}
err = c.client.Post().
Namespace(c.ns).
Resource("datasources").
VersionedParams(&opts, scheme.ParameterCodec).
Body(dataSource).
Do(ctx).
Into(result)
return
}
// Update takes the representation of a dataSource and updates it. Returns the server's representation of the dataSource, and an error, if there is any.
func (c *dataSources) Update(ctx context.Context, dataSource *v1beta1.DataSource, opts v1.UpdateOptions) (result *v1beta1.DataSource, err error) {
result = &v1beta1.DataSource{}
err = c.client.Put().
Namespace(c.ns).
Resource("datasources").
Name(dataSource.Name).
VersionedParams(&opts, scheme.ParameterCodec).
Body(dataSource).
Do(ctx).
Into(result)
return
}
// UpdateStatus was generated because the type contains a Status member.
// Add a +genclient:noStatus comment above the type to avoid generating UpdateStatus().
func (c *dataSources) UpdateStatus(ctx context.Context, dataSource *v1beta1.DataSource, opts v1.UpdateOptions) (result *v1beta1.DataSource, err error) {
result = &v1beta1.DataSource{}
err = c.client.Put().
Namespace(c.ns).
Resource("datasources").
Name(dataSource.Name).
SubResource("status").
VersionedParams(&opts, scheme.ParameterCodec).
Body(dataSource).
Do(ctx).
Into(result)
return
}
// Delete takes name of the dataSource and deletes it. Returns an error if one occurs.
func (c *dataSources) Delete(ctx context.Context, name string, opts v1.DeleteOptions) error {
return c.client.Delete().
Namespace(c.ns).
Resource("datasources").
Name(name).
Body(&opts).
Do(ctx).
Error()
}
// DeleteCollection deletes a collection of objects.
func (c *dataSources) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error {
var timeout time.Duration
if listOpts.TimeoutSeconds != nil {
timeout = time.Duration(*listOpts.TimeoutSeconds) * time.Second
}
return c.client.Delete().
Namespace(c.ns).
Resource("datasources").
VersionedParams(&listOpts, scheme.ParameterCodec).
Timeout(timeout).
Body(&opts).
Do(ctx).
Error()
}
// Patch applies the patch and returns the patched dataSource.
func (c *dataSources) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1beta1.DataSource, err error) {
result = &v1beta1.DataSource{}
err = c.client.Patch(pt).
Namespace(c.ns).
Resource("datasources").
Name(name).
SubResource(subresources...).
VersionedParams(&opts, scheme.ParameterCodec).
Body(data).
Do(ctx).
Into(result)
return
} | Namespace(c.ns).
Resource("datasources").
VersionedParams(&opts, scheme.ParameterCodec). |
issue-11552.rs | // http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#[deriving(Clone)]
enum Noun
{
Atom(int),
Cell(Box<Noun>, Box<Noun>)
}
fn fas(n: &Noun) -> Noun
{
match n {
&Noun::Cell(box Noun::Atom(2), box Noun::Cell(ref a, _)) => (**a).clone(),
_ => panic!("Invalid fas pattern")
}
}
pub fn main() {
fas(&Noun::Cell(box Noun::Atom(2), box Noun::Cell(box Noun::Atom(2), box Noun::Atom(3))));
} | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at |
|
kendo.culture.saq-KE.js | module.exports =
/******/ (function(modules) { // webpackBootstrap
/******/ // The module cache
/******/ var installedModules = {};
/******/ // The require function
/******/ function __webpack_require__(moduleId) {
/******/ // Check if module is in cache
/******/ if(installedModules[moduleId])
/******/ return installedModules[moduleId].exports;
/******/ // Create a new module (and put it into the cache)
/******/ var module = installedModules[moduleId] = {
/******/ exports: {},
/******/ id: moduleId,
/******/ loaded: false
/******/ };
/******/ // Execute the module function
/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);
/******/ // Flag the module as loaded
/******/ module.loaded = true;
/******/ // Return the exports of the module
/******/ return module.exports;
/******/ }
/******/ // expose the modules object (__webpack_modules__)
/******/ __webpack_require__.m = modules;
/******/ // expose the module cache
/******/ __webpack_require__.c = installedModules;
/******/ // __webpack_public_path__
/******/ __webpack_require__.p = "";
/******/ // Load entry module and return exports
/******/ return __webpack_require__(0);
/******/ })
/************************************************************************/
/******/ ({
/***/ 0:
/***/ (function(module, exports, __webpack_require__) {
__webpack_require__(693);
module.exports = __webpack_require__(693);
/***/ }),
/***/ 693:
/***/ (function(module, exports) {
(function( window, undefined ) {
kendo.cultures["saq-KE"] = {
name: "saq-KE",
numberFormat: {
pattern: ["-n"],
decimals: 2,
",": ",",
".": ".",
groupSize: [3],
percent: {
pattern: ["-n%","n%"],
decimals: 2,
",": ",",
".": ".",
groupSize: [3],
symbol: "%"
},
currency: {
name: "Kenyan Shilling",
abbr: "KES",
pattern: ["-$n","$n"],
decimals: 2,
",": ",",
".": ".",
groupSize: [3],
symbol: "Ksh"
}
},
calendars: {
standard: {
days: {
names: ["Mderot ee are","Mderot ee kuni","Mderot ee ong’wan","Mderot ee inet","Mderot ee ile","Mderot ee sapa","Mderot ee kwe"],
namesAbbr: ["Are","Kun","Ong","Ine","Ile","Sap","Kwe"],
namesShort: ["Are","Kun","Ong","Ine","Ile","Sap","Kwe"]
},
months: {
names: ["Lapa le obo","Lapa le waare","Lapa le okuni","Lapa le ong’wan","Lapa le imet","Lapa le ile","Lapa le sapa","Lapa le isiet","Lapa le saal","Lapa le tomon","Lapa le tomon obo","Lapa le tomon waare"],
namesAbbr: ["Obo","Waa","Oku","Ong","Ime","Ile","Sap","Isi","Saa","Tom","Tob","Tow"]
},
AM: ["Tesiran","tesiran","TESIRAN"],
PM: ["Teipa","teipa","TEIPA"],
patterns: {
d: "dd/MM/yyyy",
D: "dddd, d MMMM yyyy",
F: "dddd, d MMMM yyyy HH:mm:ss",
g: "dd/MM/yyyy HH:mm",
G: "dd/MM/yyyy HH:mm:ss",
m: "MMMM d",
M: "MMMM d",
s: "yyyy'-'MM'-'dd'T'HH':'mm':'ss",
t: "HH:mm",
T: "HH:mm:ss",
u: "yyyy'-'MM'-'dd HH':'mm':'ss'Z'",
y: "MMMM yyyy",
Y: "MMMM yyyy"
},
"/": "/",
":": ":",
firstDay: 0
} | })(this);
/***/ })
/******/ }); | }
} |
banneradd.model.ts | /*
* spurtcommerce
* version 3.0
* http://www.spurtcommerce.com
*
* Copyright (c) 2019 piccosoft ltd
* Author piccosoft ltd <[email protected]>
* Licensed under the MIT license.
*/
export class BanneraddModel {
public title: string;
public content: string;
public image: string;
public link: string;
public position: string;
public status: number; | this.image = bannerForm.image || '';
this.link = bannerForm.link || '';
this.position = bannerForm.position || '';
this.status = bannerForm.active || 0;
}
} |
constructor(bannerForm: any) {
this.title = bannerForm.title || '';
this.content = bannerForm.content || ''; |
Bloxorz.py | # 0 is for perpendicular mode
# 1 is for flat mode
# 0 is for X-Axis config
# 1 is for Y-Axis mode
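# For example (illustrative, not from the original code):
#   mode=0              -> block stands upright on one cell (positionsecondbox == [])
#   mode=1, config=0    -> block lies flat along the X axis: two cells in the same row
#   mode=1, config=1    -> block lies flat along the Y axis: two cells in the same column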
from copy import deepcopy
class Block:
def __init__(self, givenboard, mode, config, positionfirstbox, positionsecondbox):
# Copy Board
self.board = givenboard
# Fill the Board with Block
self.board.field[positionfirstbox[0]][positionfirstbox[1]] = 2
if positionsecondbox != []:
self.board.field[positionsecondbox[0]][positionsecondbox[1]] = 2
self.mode = mode
self.config = config
self.positionFirstBox = positionfirstbox
self.positionSecondBox = positionsecondbox
def isgamewon(self):
return self.mode == 0 and self.positionFirstBox == self.board.goal
def ismovableleft(self):
try:
if self.mode == 0:
if self.board.field[self.positionFirstBox[0]][self.positionFirstBox[1] - 1] != 1 \
and self.board.field[self.positionFirstBox[0]][self.positionFirstBox[1] - 2] != 1:
return True
else:
return False
elif self.mode == 1:
if self.config == 0:
if self.board.field[self.positionFirstBox[0]][self.positionFirstBox[1] - 1] != 1:
return True
else:
return False
if self.config == 1:
if self.board.field[self.positionFirstBox[0]][self.positionFirstBox[1] - 1] != 1 \
and self.board.field[self.positionSecondBox[0]][self.positionSecondBox[1] - 1] != 1:
return True
else:
return False
except IndexError:
return False
def ismovableright(self):
try:
if self.mode == 0:
if self.board.field[self.positionFirstBox[0]][self.positionFirstBox[1] + 1] != 1 \
and self.board.field[self.positionFirstBox[0]][self.positionFirstBox[1] + 2] != 1:
return True
else:
return False
elif self.mode == 1:
if self.config == 0:
if self.board.field[self.positionSecondBox[0]][self.positionSecondBox[1] + 1] != 1:
return True
else:
return False
if self.config == 1:
if self.board.field[self.positionFirstBox[0]][self.positionFirstBox[1] + 1] != 1 \
and self.board.field[self.positionSecondBox[0]][self.positionSecondBox[1] + 1] != 1:
return True
else:
return False
except IndexError:
return False
def ismovableup(self):
try:
if self.mode == 0:
if self.board.field[self.positionFirstBox[0] - 1][self.positionFirstBox[1]] != 1 \
and self.board.field[self.positionFirstBox[0] - 2][self.positionFirstBox[1]] != 1:
return True
else:
return False
elif self.mode == 1:
if self.config == 0:
if self.board.field[self.positionFirstBox[0] - 1][self.positionFirstBox[1]] != 1 \
and self.board.field[self.positionSecondBox[0] - 1][self.positionSecondBox[1]] != 1:
return True
else:
return False
elif self.config == 1:
if self.board.field[self.positionFirstBox[0] - 1][self.positionFirstBox[1]] != 1:
return True
else:
return False
except IndexError:
return False
def ismovabledown(self):
try:
if self.mode == 0:
if self.board.field[self.positionFirstBox[0] + 1][self.positionFirstBox[1]] != 1 \
and self.board.field[self.positionFirstBox[0] + 2][self.positionFirstBox[1]] != 1:
return True
else:
return False
elif self.mode == 1:
if self.config == 0:
if self.board.field[self.positionFirstBox[0] + 1][self.positionFirstBox[1]] != 1 \
and self.board.field[self.positionSecondBox[0] + 1][self.positionSecondBox[1]] != 1:
return True
else:
return False
elif self.config == 1:
if self.board.field[self.positionSecondBox[0] + 1][self.positionSecondBox[1]] != 1:
return True
else:
return False
except IndexError:
return False
def getleft(self):
if self.mode == 0:
# Object location
secondbox = [self.positionFirstBox[0], self.positionFirstBox[1] - 1]
firstbox = [self.positionFirstBox[0], self.positionFirstBox[1] - 2]
return [firstbox, secondbox, 1, 0]
elif self.mode == 1:
if self.config == 0:
firstbox = [self.positionFirstBox[0], self.positionFirstBox[1] - 1]
return [firstbox, [], 0, self.config]
if self.config == 1:
positionSecondBox = [self.positionSecondBox[0], self.positionSecondBox[1] - 1]
positionFirstBox = [self.positionFirstBox[0], self.positionFirstBox[1] - 1]
return [positionFirstBox, positionSecondBox, 1, self.config]
def moveleft(self):
if self.mode == 0:
if self.ismovableleft():
# Erase the object from board
self.board.field[self.positionFirstBox[0]][self.positionFirstBox[1]] = 0
# Re-put object
self.board.field[self.positionFirstBox[0]][self.positionFirstBox[1] - 1] = 2
self.board.field[self.positionFirstBox[0]][self.positionFirstBox[1] - 2] = 2
# Update object location
self.positionSecondBox = [self.positionFirstBox[0], self.positionFirstBox[1] - 1]
self.positionFirstBox = [self.positionFirstBox[0], self.positionFirstBox[1] - 2]
# Change Mode and Config
self.mode = 1
self.config = 0
return True
else:
return False
elif self.mode == 1:
if self.ismovableleft():
if self.config == 0:
# Erase the object from board
self.board.field[self.positionFirstBox[0]][self.positionFirstBox[1]] = 0
self.board.field[self.positionSecondBox[0]][self.positionSecondBox[1]] = 0
# Re-put object
self.board.field[self.positionFirstBox[0]][self.positionFirstBox[1] - 1] = 2
# Update object location
self.positionSecondBox = []
self.positionFirstBox = [self.positionFirstBox[0], self.positionFirstBox[1] - 1]
# Change Mode
self.mode = 0
return True
if self.config == 1:
# Erase the object from board
self.board.field[self.positionFirstBox[0]][self.positionFirstBox[1]] = 0
self.board.field[self.positionSecondBox[0]][self.positionSecondBox[1]] = 0
# Re-put object
self.board.field[self.positionFirstBox[0]][self.positionFirstBox[1] - 1] = 2
self.board.field[self.positionSecondBox[0]][self.positionSecondBox[1] - 1] = 2
# Update object location
self.positionSecondBox = [self.positionSecondBox[0], self.positionSecondBox[1] - 1]
self.positionFirstBox = [self.positionFirstBox[0], self.positionFirstBox[1] - 1]
return True
else:
return False
def moveright(self):
if self.mode == 0:
if self.ismovableright():
# Erase the object from board
self.board.field[self.positionFirstBox[0]][self.positionFirstBox[1]] = 0
# Re-put object
self.board.field[self.positionFirstBox[0]][self.positionFirstBox[1] + 1] = 2
self.board.field[self.positionFirstBox[0]][self.positionFirstBox[1] + 2] = 2
# Update object location
self.positionSecondBox = [self.positionFirstBox[0], self.positionFirstBox[1] + 2]
self.positionFirstBox = [self.positionFirstBox[0], self.positionFirstBox[1] + 1]
# Change Mode
self.mode = 1
self.config = 0
return True
else:
return False
elif self.mode == 1:
if self.ismovableright():
if self.config == 0:
# Erase the object from board
self.board.field[self.positionFirstBox[0]][self.positionFirstBox[1]] = 0
self.board.field[self.positionSecondBox[0]][self.positionSecondBox[1]] = 0
# Re-put object
self.board.field[self.positionFirstBox[0]][self.positionSecondBox[1] + 1] = 2
# Update object location
self.positionFirstBox = [self.positionFirstBox[0], self.positionSecondBox[1] + 1]
self.positionSecondBox = []
# Change Mode
self.mode = 0
return True
if self.config == 1:
# Erase the object from board
self.board.field[self.positionFirstBox[0]][self.positionFirstBox[1]] = 0
self.board.field[self.positionSecondBox[0]][self.positionSecondBox[1]] = 0
# Re-put object
self.board.field[self.positionFirstBox[0]][self.positionFirstBox[1] + 1] = 2
self.board.field[self.positionSecondBox[0]][self.positionSecondBox[1] + 1] = 2
# Update object location
self.positionFirstBox = [self.positionFirstBox[0], self.positionFirstBox[1] + 1]
self.positionSecondBox = [self.positionSecondBox[0], self.positionSecondBox[1] + 1]
return True
else:
return False
def getright(self):
|
def moveup(self):
if self.mode == 0:
if self.ismovableup():
# Erase the object from board
self.board.field[self.positionFirstBox[0]][self.positionFirstBox[1]] = 0
# Re-put object
self.board.field[self.positionFirstBox[0] - 1][self.positionFirstBox[1]] = 2
self.board.field[self.positionFirstBox[0] - 2][self.positionFirstBox[1]] = 2
# Update object location
self.positionSecondBox = [self.positionFirstBox[0] - 1, self.positionFirstBox[1]]
self.positionFirstBox = [self.positionFirstBox[0] - 2, self.positionFirstBox[1]]
# Change Mode
self.mode = 1
self.config = 1
return True
else:
return False
elif self.mode == 1:
if self.ismovableup():
if self.config == 0:
# Erase the object from board
self.board.field[self.positionFirstBox[0]][self.positionFirstBox[1]] = 0
self.board.field[self.positionSecondBox[0]][self.positionSecondBox[1]] = 0
# Re-put object
self.board.field[self.positionFirstBox[0] - 1][self.positionFirstBox[1]] = 2
self.board.field[self.positionSecondBox[0] - 1][self.positionSecondBox[1]] = 2
# Update object location
self.positionSecondBox = [self.positionSecondBox[0] - 1, self.positionSecondBox[1]]
self.positionFirstBox = [self.positionFirstBox[0] - 1, self.positionFirstBox[1]]
return True
elif self.config == 1:
# Erase the object from board
self.board.field[self.positionFirstBox[0]][self.positionFirstBox[1]] = 0
self.board.field[self.positionSecondBox[0]][self.positionSecondBox[1]] = 0
# Re-put object
self.board.field[self.positionFirstBox[0] - 1][self.positionFirstBox[1]] = 2
# Update object location
self.positionFirstBox = [self.positionFirstBox[0] - 1, self.positionFirstBox[1]]
self.positionSecondBox = []
# Change Mode
self.mode = 0
return True
else:
return False
def getup(self):
if self.mode == 0:
# Object location
secondbox = [self.positionFirstBox[0] - 1, self.positionFirstBox[1]]
firstbox = [self.positionFirstBox[0] - 2, self.positionFirstBox[1]]
return [firstbox, secondbox, 1, 1]
elif self.mode == 1:
if self.config == 0:
positionSecondBox = [self.positionSecondBox[0] - 1, self.positionSecondBox[1]]
positionFirstBox = [self.positionFirstBox[0] - 1, self.positionFirstBox[1]]
return [positionFirstBox, positionSecondBox, self.mode, self.config]
if self.config == 1:
positionFirstBox = [self.positionFirstBox[0] - 1, self.positionFirstBox[1]]
positionSecondBox = []
return [positionFirstBox, positionSecondBox, 0, self.config]
def movedown(self):
if self.mode == 0:
if self.ismovabledown():
# Erase the object from board
self.board.field[self.positionFirstBox[0]][self.positionFirstBox[1]] = 0
# Re-put object
self.board.field[self.positionFirstBox[0] + 1][self.positionFirstBox[1]] = 2
self.board.field[self.positionFirstBox[0] + 2][self.positionFirstBox[1]] = 2
# Update object location
self.positionSecondBox = [self.positionFirstBox[0] + 2, self.positionFirstBox[1]]
self.positionFirstBox = [self.positionFirstBox[0] + 1, self.positionFirstBox[1]]
# Change Mode
self.mode = 1
self.config = 1
return True
else:
return False
elif self.mode == 1:
if self.ismovabledown():
if self.config == 0:
# Erase the object from board
self.board.field[self.positionFirstBox[0]][self.positionFirstBox[1]] = 0
self.board.field[self.positionSecondBox[0]][self.positionSecondBox[1]] = 0
# Re-put object
self.board.field[self.positionFirstBox[0] + 1][self.positionFirstBox[1]] = 2
self.board.field[self.positionSecondBox[0] + 1][self.positionSecondBox[1]] = 2
# Update object location
self.positionSecondBox = [self.positionSecondBox[0] + 1, self.positionSecondBox[1]]
self.positionFirstBox = [self.positionFirstBox[0] + 1, self.positionFirstBox[1]]
return True
elif self.config == 1:
# Erase the object from board
self.board.field[self.positionFirstBox[0]][self.positionFirstBox[1]] = 0
self.board.field[self.positionSecondBox[0]][self.positionSecondBox[1]] = 0
# Re-put object
self.board.field[self.positionSecondBox[0] + 1][self.positionSecondBox[1]] = 2
# Update object location
self.positionFirstBox = [self.positionSecondBox[0] + 1, self.positionFirstBox[1]]
self.positionSecondBox = []
# Change Mode
self.mode = 0
return True
else:
return False
def getdown(self):
if self.mode == 0:
# Object location
secondbox = [self.positionFirstBox[0] + 2, self.positionFirstBox[1]]
firstbox = [self.positionFirstBox[0] + 1, self.positionFirstBox[1]]
return [firstbox, secondbox, 1, 1]
elif self.mode == 1:
if self.config == 0:
# Adjust the box positions
positionSecondBox = [self.positionSecondBox[0] + 1, self.positionSecondBox[1]]
positionFirstBox = [self.positionFirstBox[0] + 1, self.positionFirstBox[1]]
return [positionFirstBox, positionSecondBox, self.mode, self.config]
if self.config == 1:
# Adjust the box positions
positionFirstBox = [self.positionSecondBox[0] + 1, self.positionFirstBox[1]]
positionSecondBox = []
return [positionFirstBox, positionSecondBox, 0, self.config]
def printfield(self):
printer = deepcopy(self.board.field).astype(str)
# Map the numeric cells back to their display characters and print
for i in range(self.board.field.shape[0]):
for j in range(self.board.field.shape[1]):
if self.board.field[i][j] == 1:
printer[i][j] = 'X'
elif self.board.field[i][j] == 0:
printer[i][j] = 'O'
elif self.board.field[i][j] == 2:
printer[i][j] = 'S'
elif self.board.field[i][j] == 3:
printer[i][j] = 'G'
print("Current Board: \n", printer,"\n")
class Board:
def __init__(self, array):
# Convert the board and store it
for i in range(array.shape[0]):
for j in range(array.shape[1]):
if array[i][j] == 'X':
array[i][j] = 1
elif array[i][j] == 'O':
array[i][j] = 0
elif array[i][j] == 'S':
array[i][j] = 2
elif array[i][j] == 'G':
array[i][j] = 3
self.field = array.astype(int)
for i in range(self.field.shape[0]):
for j in range(self.field.shape[1]):
if self.field[i][j] == 3:
# Update Field And Set The Goal Point
self.field[i][j] = 0
self.goal = [i, j]
break
| if self.mode == 0:
# Object location
secondbox = [self.positionFirstBox[0], self.positionFirstBox[1] + 2]
firstbox = [self.positionFirstBox[0], self.positionFirstBox[1] + 1]
return [firstbox, secondbox, 1, 0]
elif self.mode == 1:
if self.config == 0:
firstbox = [self.positionFirstBox[0], self.positionSecondBox[1] + 1]
return [firstbox, [], 0, self.config]
if self.config == 1:
positionFirstBox = [self.positionFirstBox[0], self.positionFirstBox[1] + 1]
positionSecondBox = [self.positionSecondBox[0], self.positionSecondBox[1] + 1]
return [positionFirstBox, positionSecondBox, self.mode, self.config] |
11-Building useful classes_.py | class Point3D(object):
|
my_point = Point3D(1,2,3)
print my_point
| def __init__(self, x, y, z):
self.x = x
self.y = y
self.z = z
def __repr__(self):
return "(" + str(self.x) + ", " + str(self.y) + ", " + str(self.z) + ")" |
0005_auto_20190408_1029.py | # Generated by Django 2.1.7 on 2019-04-08 10:29
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('led', '0004_sensors_action'),
]
operations = [
migrations.AlterField(
model_name='sensors',
name='humidity',
field=models.FloatField(default=0.0, max_length=250),
),
migrations.AlterField(
model_name='sensors',
name='moisture',
field=models.FloatField(default=0.0, max_length=250),
), | name='temperature',
field=models.FloatField(default=0.0, max_length=250),
),
] | migrations.AlterField(
model_name='sensors', |
d-pagination.ts | /*
* Copyright (C) 2019 Toshiba Corporation
* SPDX-License-Identifier: Apache-2.0
*/
import { DButton } from "./d-button";
import {
DLayoutHorizontal,
DLayoutHorizontalOptions,
DThemeLayoutHorizontal
} from "./d-layout-horizontal";
import { DPaginationDynamicButtons } from "./d-pagination-dynamic-buttons";
import { DPaginationNavigationButton } from "./d-pagination-navigation-button";
import { DThemes } from "./theme/d-themes";
export interface DPaginationOptions<THEME extends DThemePagination>
extends DLayoutHorizontalOptions<THEME> {
total: number;
selected?: number;
button?: {
first?: boolean;
last?: boolean;
width?: number;
};
}
export interface DThemePagination extends DThemeLayoutHorizontal {
getButtonWidth(): number;
}
interface DPaginationButtonOption {
first: boolean;
last: boolean;
width?: number;
}
export class DPagination<
THEME extends DThemePagination = DThemePagination,
OPTIONS extends DPaginationOptions<THEME> = DPaginationOptions<THEME>
> extends DLayoutHorizontal<THEME, OPTIONS> {
protected _total!: number;
protected _selected!: number;
protected _buttonOptions!: DPaginationButtonOption;
protected _numberPageButtonVisible!: number;
protected DEFAULT_SELECTED!: number;
protected _firstPageBtn!: DButton;
protected _lastPageBtn!: DButton;
protected _dynamicPageBtns!: DPaginationDynamicButtons;
protected _previousBtn!: DPaginationNavigationButton;
protected _nextBtn!: DPaginationNavigationButton;
protected _goFirstBtn!: DPaginationNavigationButton;
protected _goLastBtn!: DPaginationNavigationButton;
protected init(options: OPTIONS): void {
super.init(options);
this.DEFAULT_SELECTED = 0; // the default selected page index is 0
// get total pages
this._total = options.total;
// get selected page
this._selected = options.selected ?? this.DEFAULT_SELECTED;
// get button options
const button = options.button;
this._buttonOptions = {
first: !!button?.first,
last: !!button?.last,
width: button?.width
};
this.initButtons(this.getButtonWidth());
this.listenButtonClicked();
this.on("resize", (): void => {
this._numberPageButtonVisible = this.toNumberVisible();
this.update();
});
}
/**
* Set selected page.
*
* @param selected index of the page to select.
*/
set selected(selected: number) {
if (selected < 0 || selected >= this._total || !Number.isInteger(selected)) {
selected = this.DEFAULT_SELECTED;
}
this._selected = selected;
this.update();
}
/**
* Get selected page.
*
* @returns index of selected page.
*/
get selected(): number {
return this._selected;
}
/**
* Set total page.
*
* @param total number of pages to present in the pagination.
*/
set total(total: number) {
if (total >= 0 && Number.isInteger(total)) {
this._total = total;
this._numberPageButtonVisible = this.toNumberVisible();
this.selected = this._selected;
this._lastPageBtn.text = this._total;
this.update();
}
}
/**
* Get total pages.
*
* @returns number of total pages.
*/
get total(): number {
return this._total;
}
protected initButtons(width: number): void {
this._previousBtn = new DPaginationNavigationButton({
width,
image: {
source: DThemes.getInstance().getAtlas().mappings
.pagination_navigation_button_previous
}
});
this._nextBtn = new DPaginationNavigationButton({
width,
image: {
source: DThemes.getInstance().getAtlas().mappings.pagination_navigation_button_next
}
});
this._goFirstBtn = new DPaginationNavigationButton({
width,
image: {
source: DThemes.getInstance().getAtlas().mappings
.pagination_navigation_button_go_first
},
visible: this._buttonOptions.first
});
this._goLastBtn = new DPaginationNavigationButton({
width,
image: {
source: DThemes.getInstance().getAtlas().mappings
.pagination_navigation_button_go_last
},
visible: this._buttonOptions.last
});
this._dynamicPageBtns = new DPaginationDynamicButtons({
button: {
width
}
});
this._firstPageBtn = new DButton({
width,
text: {
value: 1
}
});
this._lastPageBtn = new DButton({
width,
text: {
value: this._total
}
});
this.addChild(this._goFirstBtn);
this.addChild(this._previousBtn);
this.addChild(this._firstPageBtn);
this.addChild(this._dynamicPageBtns);
this.addChild(this._lastPageBtn);
this.addChild(this._nextBtn);
this.addChild(this._goLastBtn);
}
protected listenButtonClicked(): void {
this._firstPageBtn.on("active", (btn: DButton) => {
this.onClickPageButton(btn);
});
this._lastPageBtn.on("active", (btn: DButton) => {
this.onClickPageButton(btn);
});
this._dynamicPageBtns.on("active", (btn: DButton) => {
this.onClickPageButton(btn);
});
this._goFirstBtn.on("active", (btn: DButton) => {
this.selected = this.DEFAULT_SELECTED;
});
this._goLastBtn.on("active", (btn: DButton) => {
this.selected = this._total - 1;
});
this._nextBtn.on("active", (btn: DButton) => {
if (this._selected !== this._total - 1) {
this.selected = this._selected + 1;
}
});
this._previousBtn.on("active", (btn: DButton) => {
if (this._selected !== 0) {
this.selected = this._selected - 1;
}
});
}
protected update(): void {
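// Recompute which page buttons are visible around the selected page and whether dots are shown on either side.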
let startDynamic: number;
let endDynamic: number;
let dotsLeft: boolean;
let dotsRight: boolean;
let numberButtonsInLeft = 0;
let numberButtonsInRight = 0;
this.updateStaticButtons();
// Number of page buttons displayed between the first button and the selected
// button when the selected button sits at the center, excluding the selected button itself.
const numberButtonsFirstToCenter = Math.ceil((this._numberPageButtonVisible - 1) * 0.5);
const numberButtonsCenterToEnd = Math.floor((this._numberPageButtonVisible - 1) * 0.5);
if (this._selected < numberButtonsFirstToCenter) {
numberButtonsInLeft = this._selected;
numberButtonsInRight = this._numberPageButtonVisible - numberButtonsInLeft - 1;
} else if (this._selected + numberButtonsCenterToEnd > this._total - 1) {
numberButtonsInRight = this._total - 1 - this.selected;
numberButtonsInLeft = this._numberPageButtonVisible - numberButtonsInRight - 1;
} else {
numberButtonsInLeft = numberButtonsFirstToCenter;
numberButtonsInRight = numberButtonsCenterToEnd;
}
if (this._selected <= numberButtonsInLeft) {
startDynamic = 1;
dotsLeft = false; |
if (this._selected + numberButtonsInRight >= this._total - 1) {
endDynamic = this._total - 2;
dotsRight = false;
} else {
endDynamic = this._selected + numberButtonsInRight - 2;
dotsRight = true;
}
this._dynamicPageBtns.update({
start: startDynamic,
end: endDynamic,
selected: this._selected,
button: {
width: this.getButtonWidth(),
dotsLeft,
dotsRight
}
});
}
protected updateStaticButtons(): void {
if (this._total > 0) {
this._firstPageBtn.show();
} else {
this._firstPageBtn.hide();
}
if (this._total > 1) {
this._lastPageBtn.show();
} else {
this._lastPageBtn.hide();
}
const isFirst = this._selected === this.DEFAULT_SELECTED;
const isLast = this._selected === this._total - 1 || this._total === 0;
this._firstPageBtn.state.isActive = isFirst;
this._lastPageBtn.state.isActive = isLast;
this._goFirstBtn.state.isDisabled = isFirst;
this._previousBtn.state.isDisabled = isFirst;
this._nextBtn.state.isDisabled = isLast;
this._goLastBtn.state.isDisabled = isLast;
}
protected getButtonWidth(): number {
return this._buttonOptions.width ? this._buttonOptions.width : this.theme.getButtonWidth();
}
protected toNumberVisible(): number {
let numberNavigationBtn = 2; // the "next" and "previous" buttons are always displayed
if (this._buttonOptions.first) {
numberNavigationBtn++;
}
if (this._buttonOptions.last) {
numberNavigationBtn++;
}
const widthOfNavigationBtns =
numberNavigationBtn * (this.getButtonWidth() + this._margin.horizontal * 2);
const widthOfPageBtns = this.width - widthOfNavigationBtns;
const numberVisible = Math.floor(
widthOfPageBtns / (this.getButtonWidth() + this._margin.horizontal * 2)
);
/* Raise numberVisible to at least 5 if it is smaller,
then cap it at the total page count when there are fewer pages than that.
*/
return Math.min(this._total, Math.max(numberVisible, 5));
}
protected onClickPageButton(btn: DButton): void {
const btnIndex = Number(btn.text) - 1;
if (this._selected !== btnIndex) {
this._selected = btnIndex;
this.update();
}
}
protected getType(): string {
return "DPagination";
}
} | } else {
startDynamic = this._selected - numberButtonsInLeft + 2;
dotsLeft = true;
} |
api.rs | use actix_http::{encoding::Decoder, Payload};
use actix_web::client::{Client, ClientResponse};
use actix_web::http::header;
use anyhow::{bail, Result};
use chrono::{Datelike, Local, NaiveDate, NaiveDateTime};
use serde::Deserialize;
use serde::Serialize;
use std::{
collections::{HashMap, HashSet},
time::Duration,
};
pub async fn get_graph_data(access_token: &str) -> ClientResponse<Decoder<Payload>> {
let query = r#"{"query" : "query { viewer { contributionsCollection { contributionCalendar { weeks { firstDay contributionDays { contributionCount } } } } } }"}"#;
let client = Client::default();
let response = client
.post("https://api.github.com/graphql")
.header("Content-type", "application/json")
.header("User-Agent", "actix-web/3.0")
.bearer_auth(access_token)
.send_body(query)
.await
.unwrap();
response
}
pub async fn parse_graph_response(mut res: ClientResponse<Decoder<Payload>>) -> Result<Vec<Week>> {
let data_github: Data =
serde_json::from_str(&String::from_utf8(res.body().await.unwrap().to_vec()).unwrap())?;
Ok(data_github
.data
.viewer
.contributionsCollection
.contributionCalendar
.weeks)
}
pub async fn get_user_id(access_token: &str) -> Result<String> {
let query = r#"{"query": "query { viewer { login }}""#;
let client = Client::new();
let mut response = client
.post("https://api.github.com/graphql")
.header("Content-type", "application/json")
.header("User-Agent", "actix-web/3.0")
.bearer_auth(access_token)
.send_body(query)
.await
.unwrap();
let user_id: UserID =
serde_json::from_str(&String::from_utf8(response.body().await.unwrap().to_vec()).unwrap())?;
Ok(user_id.data.viewer.login)
}
pub async fn get_atcoder_graph_data(user_id: &str, show_mode: ShowMode) -> Result<Vec<usize>> {
if user_id.is_empty() {
bail!("no input");
}
const ATCODER_API_URL: &str = "https://kenkoooo.com/atcoder/atcoder-api/results?user=";
let client = Client::default();
let response = client
.get(format!("{}{}", ATCODER_API_URL, user_id))
.header(header::ACCEPT_ENCODING, "gzip")
.timeout(Duration::from_secs(60))
.send()
.await;
let body = match response {
Ok(mut response) => response.body().limit(2048 * 2048 * 126).await,
Err(_) => {
bail!("Probably an invalid username");
}
};
// let submissions = response.unwrap().json::<Vec<Submission>>().await; // kept hitting a Payload (overflow) error; never managed to fix it
let submissions: Vec<Submission> = serde_json::from_slice(&body.unwrap().to_vec())?;
let (first_day, last_day, dates, date_to_idx) = create_dates_data().await;
let mut counts = vec![0; dates.len()];
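// 9 hours in seconds; the UTC epochs below are shifted by this offset (JST, UTC+9) before taking the calendar date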
const NINE_HOUR: i64 = 32400;
match show_mode {
ShowMode::Submissions => {
for sub in submissions {
let date = NaiveDateTime::from_timestamp(sub.epoch_second + NINE_HOUR, 0).date();
if date < first_day || last_day < date {
continue;
}
let idx = date_to_idx[&date];
counts[idx] += 1;
}
}
ShowMode::AC => {
for sub in submissions {
let date = NaiveDateTime::from_timestamp(sub.epoch_second + NINE_HOUR, 0).date();
if date < first_day || last_day < date || sub.result != "AC" {
continue;
}
let idx = date_to_idx[&date];
counts[idx] += 1;
}
}
ShowMode::UniqueAC => {
// Keep only the submissions that are already AC
// Sort them by epoch_second
// Build a HashSet that remembers counted problem_ids
// Count from the earliest AC submission onward
// Before counting, make sure the problem_id is not yet in the HashSet
// After counting, insert the problem_id into the HashSet
let mut submissions = submissions
.into_iter()
.filter(|sub| sub.result == "AC")
.collect::<Vec<Submission>>();
submissions.sort_by_key(|sub| sub.epoch_second);
let mut counted_problems = HashSet::new();
for sub in submissions {
if !counted_problems.contains(&sub.problem_id) {
counted_problems.insert(sub.problem_id);
let date =
NaiveDateTime::from_timestamp(sub.epoch_second + NINE_HOUR, 0).date();
if date < first_day {
continue;
}
let idx = date_to_idx[&date];
counts[idx] += 1;
}
}
}
}
Ok(counts)
}
async fn create_dates_data() -> (
NaiveDate,
NaiveDate,
Vec<NaiveDate>,
HashMap<NaiveDate, usize>,
) {
const WEEKS: i64 = 53;
const WEEKDAY: i64 = 7;
let last_day = Local::today().naive_local();
let mut next_sunday = last_day.succ();
while next_sunday.weekday() != chrono::Weekday::Sun {
next_sunday = next_sunday.succ();
}
let first_day = next_sunday - chrono::Duration::days(WEEKS * WEEKDAY);
let mut day = first_day;
let mut dates = vec![];
let mut date_to_idx = HashMap::new();
for i in 0..WEEKS * WEEKDAY {
date_to_idx.insert(day, i as usize);
dates.push(day);
if day == last_day {
break;
}
day = day.succ();
}
(first_day, last_day, dates, date_to_idx)
}
#[allow(clippy::upper_case_acronyms)]
#[derive(Deserialize, Debug)]
pub enum ShowMode {
Submissions,
AC,
UniqueAC,
}
#[derive(Deserialize, Debug)]
pub struct AtCoderData {
pub submissions: Vec<Submission>,
}
#[derive(Deserialize, Debug)]
pub struct Submission {
id: i64,
pub epoch_second: i64,
pub problem_id: String,
contest_id: String,
user_id: String,
language: String,
point: f64,
length: usize,
pub result: String,
execution_time: Option<i64>,
}
#[derive(Deserialize, Debug)]
#[allow(clippy::upper_case_acronyms)]
pub struct UserID {
data: UserViewer,
}
#[derive(Deserialize, Debug)]
struct UserViewer {
viewer: Login,
}
| #[derive(Deserialize, Debug)]
struct Login {
login: String,
}
#[derive(Deserialize, Debug)]
pub struct Data {
data: Viewer,
}
#[derive(Deserialize, Debug)]
struct Viewer {
viewer: ContributionCollection,
}
#[allow(non_snake_case)]
#[derive(Deserialize, Debug)]
struct ContributionCollection {
contributionsCollection: ContributionCalendar,
}
#[allow(non_snake_case)]
#[derive(Deserialize, Debug)]
struct ContributionCalendar {
contributionCalendar: Weeks,
}
#[derive(Deserialize, Debug, Serialize)]
struct Weeks {
weeks: Vec<Week>,
}
#[allow(non_snake_case)]
#[derive(Deserialize, Debug, Serialize)]
pub struct Week {
pub firstDay: String,
pub contributionDays: Vec<ContributionCount>,
}
#[allow(non_snake_case)]
#[derive(Deserialize, Debug, Serialize)]
pub struct ContributionCount {
pub contributionCount: i32,
} | |
lib.rs |
#![recursion_limit="512"]
#[macro_use]
extern crate serde_derive;
extern crate serde;
#[cfg_attr(test, macro_use)]
extern crate serde_json;
#[macro_use]
extern crate log;
extern crate hubcaps;
extern crate hyper;
extern crate hyper_native_tls;
extern crate either;
extern crate lru_cache;
extern crate tempfile;
extern crate amqp;
extern crate fs2;
extern crate md5;
extern crate uuid;
extern crate env_logger;
use std::env;
pub mod acl;
pub mod checkout;
pub mod locks;
pub mod clone;
pub mod worker;
pub mod config;
pub mod message;
pub mod tasks;
pub mod evalchecker;
pub mod nix;
pub mod stats;
pub mod ghevent;
pub mod commentparser;
pub mod commitstatus;
pub mod outpathdiff;
pub mod tagger;
pub mod asynccmd;
pub mod notifyworker;
pub mod writetoline;
pub mod test_scratch;
pub mod easyamqp;
pub mod ofborg {
pub use asynccmd;
pub use stats;
pub use config;
pub use checkout;
pub use locks;
pub use clone;
pub use worker;
pub use notifyworker;
pub use message;
pub use tasks;
pub use evalchecker;
pub use commitstatus;
pub use ghevent;
pub use nix;
pub use acl;
pub use commentparser;
pub use outpathdiff;
pub use tagger;
pub use writetoline;
pub use test_scratch;
pub use easyamqp;
pub const VERSION: &'static str = env!("CARGO_PKG_VERSION");
}
pub fn | () {
if let Err(_) = env::var("RUST_LOG") {
env::set_var("RUST_LOG", "info");
env_logger::init().unwrap();
info!("Defaulting RUST_LOG environment variable to info");
} else {
env_logger::init().unwrap();
}
}
| setup_log |
authentication.service.ts | import { Injectable } from "@angular/core";
import { HttpClient, HttpHeaders } from "@angular/common/http";
import { Observable } from "rxjs";
import { map } from "rxjs/operators";
import { User } from "../_models/User";
const httpOptions = {
headers: new HttpHeaders({
"Content-Type": "application/json",
}),
};
| public currentUser: User;
constructor(private http: HttpClient) {
this.currentUser = JSON.parse(sessionStorage.getItem("currentUser"));
}
public get currentUserValue(): User {
return this.currentUser;
}
public setNewToken(token: string): void {
const user = this.currentUserValue || new User();
user.token = token;
sessionStorage.setItem("currentUser", JSON.stringify(user));
this.currentUser = user;
}
public register(): Observable<any> {
const action = "authenticate";
return this.http
.post<any>(
"/api/auth/register",
{
action,
},
httpOptions,
)
.pipe(
map((user) => {
// login successful if there's a jwt token in the response
if (user && user.token) {
// store user details and jwt token in local storage
// to keep user logged in between page refreshes
sessionStorage.setItem("currentUser", JSON.stringify(user));
this.currentUser = user;
}
return user;
}),
);
}
public registerAgain(): Observable<any> {
return this.register();
}
public logout(): void {
// remove user from local storage to log user out
sessionStorage.removeItem("currentUser");
this.currentUser = null;
}
} | @Injectable({ providedIn: "root" })
export class AuthenticationService {
// private currentUserSubject: BehaviorSubject<User>;
// public currentUser: Observable<User>; |
kendo.buttongroup.min.js | /**
* Kendo UI v2020.2.617 (http://www.telerik.com/kendo-ui)
* Copyright 2020 Progress Software Corporation and/or one of its subsidiaries or affiliates. All rights reserved.
*
* Kendo UI commercial licenses may be obtained at
* http://www.telerik.com/purchase/license-agreement/kendo-ui-complete
* If you do not own a commercial license, this file shall be governed by the trial license terms.
*/
!function(e,define){define("kendo.buttongroup.min",["kendo.core.min","kendo.badge.min"],e)}(function(){return function(e,t){function | (n,s){var a;null!==n&&n!==t&&(n.constructor!==Object&&(n={text:n}),n.position!==t&&""!==n.position||(n.position="inline"),n._classNames=["k-button-badge"],s.addClass("k-badge-container"),a=e("<span />").appendTo(s),s.badge=new i.Badge(a,n))}var s=window.kendo,i=s.ui,a=i.Widget,o=s.keys,d=e.proxy,l=s.template,r=".kendoButtonGroup",c="k-widget",u="k-button-group",f="k-button",m="k-button-icontext",p="k-button-icon",h="k-state-active",b="k-state-focused",g="k-state-disabled",k="select",C="click",v="keydown",y="focus",x="blur",_="mousedown",w={item:l('<span #= item.enabled === false ? "disabled" : "" # >#= icon(iconClass) ##= image(item) ##= text #</span>'),image:l('<img alt="icon" src="#=data.imageUrl#" />'),icon:l('<span class="#=data#"></span>'),empty:l("")},I=a.extend({init:function(t,n){var s=this;a.fn.init.call(s,t,n),s.wrapper=s.element,s.options.items&&s._renderItems(s.options.items),s.selectedIndices=[],s.element.addClass(c+" "+u).attr("role","group").attr("tabindex",s.element.attr("tabindex")||"0").children().each(function(){var t=e(this);s._updateClasses.bind(s)(t)}),s._enable=!0,s.options.enable&&s.options.enabled||(s._enable=!1,s.element.attr("aria-disabled",!0).addClass(g)),s.select(s.options.index),s.element.on(C+r,"."+f,d(s._click,s)).on(y+r,d(s._focus,s)).on(v+r,d(s._keyDown,s)).on(x+r,function(){s.preventFocus=!1,s.element.find("."+f).removeClass(b)}).on(_+r,function(){s.preventFocus=!0})},events:[k],options:{name:"ButtonGroup",selection:"single",index:-1,enable:!0,enabled:!0},current:function(){return this.element.find("."+h)},_renderItems:function(t){var i=this;t.forEach(function(t){var a=e(w.item({image:t.imageUrl?w.image:w.empty,icon:t.imageUrl||!t.iconClass&&!t.icon?w.empty:w.icon,iconClass:t.iconClass||"k-icon k-i-"+t.icon,item:t,text:t.text?t.encoded===!1?t.text:s.htmlEncode(t.text):""}));t.attributes&&a.attr(t.attributes),t.selected&&a.addClass(h),(t.iconClass||t.icon||t.imageUrl)&&a.addClass(t.text?"k-button-icontext":"k-button-icon"),t.badge&&n(t.badge,a),a.appendTo(i.element)})},_focus:function(){var t=e(this.element);this.preventFocus||(t.find("."+h).length?t.find("."+h).first().focus().addClass(b):t.children().first().focus().addClass(b))},_keyDown:function(t){var n,i=this,a=e(i.element),d=a.find("."+f),l=a.find("."+b),r=d.index(l),c=s.support.isRtl(i.element);t.keyCode===o.LEFT&&!c||t.keyCode===o.RIGHT&&c?(l.removeClass(b),n=0===r?d.eq(d.length-1):e(d[r-1]),n.focus().addClass(b),t.preventDefault()):t.keyCode===o.LEFT&&c||t.keyCode===o.RIGHT&&!c?(l.removeClass(b),n=r+1===d.length?d.eq(0):e(d[r+1]),n.focus().addClass(b),t.preventDefault()):t.keyCode!==o.ENTER&&t.keyCode!==o.SPACEBAR||(i._select(l),t.preventDefault())},select:function(n){var s,i=this,a=-1;n!==t&&n!==-1&&(i.element.find("."+f).removeClass(b),"number"==typeof n?(a=n,n=i.element.children().eq(n)):n.nodeType&&(n=e(n),a=n.index()),"multiple"===i.options.selection?(s="true"===n.attr("aria-pressed"),n.attr("aria-pressed",!s).toggleClass(h),i.selectedIndices.indexOf(a)===-1?i.selectedIndices.push(a):i.selectedIndices.splice(i.selectedIndices.indexOf(a),1)):(i.selectedIndices=[],i.current().attr("aria-pressed",!1).removeClass(h),n.attr("aria-pressed",!0).addClass(h),i.selectedIndices.push(a)),i.trigger(k,{indices:i.selectedIndices}))},badge:function(e,i){var 
a,o=this.element,d=isNaN(e)?o.find(e):o.children().eq(e),l=i||0===i;if(d.length){if(a=d.children(".k-badge").eq(0).data("kendoBadge"),!a&&l)return n({value:s.htmlEncode(i)},d),s.htmlEncode(i);if(l)a.text(s.htmlEncode(i));else if(i===!1)return a.element.empty().remove(),a.destroy(),t;return a?a.text():null}},enable:function(e){t===e&&(e=!0),this.element.attr("aria-disabled",!e).toggleClass(g,!e),this._enable=this.options.enable=e},destroy:function(){var t=this;t.element.off(r),t.element.find(".k-badge").each(function(){e(this).data("kendoBadge").destroy()}),a.fn.destroy.call(t)},_updateClasses:function(t){var i=s.attrValue(t,"icon"),a=s.attrValue(t,"badge"),o=t.find("img").addClass("k-image"),d=!0;t.attr("aria-pressed",!1).attr("role","button").addClass(f),(t.is("[disabled]")||t.hasClass(g))&&t.addClass(g).attr("aria-disabled",!0).removeAttr("disabled"),t.is("."+h)&&(t.removeClass(h),(!t.hasClass(g)&&"single"===this.options.selection||"multiple"===this.options.selection)&&this.select(t[0])),!o[0]&&i&&t.prepend(e(w.icon("k-icon k-i-"+i))),t.contents().filter(function(){return!e(this).hasClass("k-icon")&&!e(this).hasClass("k-image")}).each(function(){(1==this.nodeType||3==this.nodeType&&s.trim(this.nodeValue).length>0)&&(d=!1)}),(o[0]||i)&&t.addClass(d?p:m),(a||0===a)&&n(a,t)},_click:function(t){var n=e(t.target).closest("."+f);t.isDefaultPrevented()||this._select(n)},_select:function(e){var n=e;return this.element.find("."+f).removeClass(b),!this._enable||n.is("."+g)?(n.addClass(b),t):(this.select(e[0]),n.addClass(b),t)}});i.plugin(I)}(window.kendo.jQuery),window.kendo},"function"==typeof define&&define.amd?define:function(e,t,n){(n||t)()});
//# sourceMappingURL=kendo.buttongroup.min.js.map
| n |
s0004_median_of_two_sorted_arrays.rs | #![allow(unused)]
pub struct Solution {}
// https://leetcode.com/problems/median-of-two-sorted-arrays/description/
use std::cmp::{max, min};
impl Solution {
pub fn find_median_sorted_arrays(nums1: Vec<i32>, nums2: Vec<i32>) -> f64 {
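// Binary-search a cut position i in the shorter array (with the matching cut
// j in the longer one) such that everything left of both cuts is <= everything
// to their right; the median is then built from the four boundary values.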
if nums1.len() > nums2.len() {
return Solution::find_median_sorted_arrays(nums2, nums1);
}
let m = nums1.len();
let n = nums2.len();
let mut left = 0;
let mut right = m;
let mut ansi: i32 = -1;
let mut median1 = 0;
let mut median2 = 0;
while left <= right {
let i = (left + right) / 2;
let j = (m + n + 1) / 2 - i;
let nums_im1 = if i == 0 {
std::i32::MIN
} else {
nums1[i - 1]
};
let nums_i = if i == m {
std::i32::MAX
} else {
nums1[i]
};
let nums_jm1 = if j == 0 {
std::i32::MIN
} else {
nums2[j - 1]
};
let nums_j = if j == n {
std::i32::MAX
} else {
nums2[j]
};
if nums_im1 <= nums_j {
ansi = i as i32;
median1 = max(nums_im1, nums_jm1);
median2 = min(nums_i, nums_j);
left = i + 1;
} else {
right = i - 1;
}
}
let res = if (m + n) % 2 == 0 { | } else {
median1 as f64
};
res
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_4() {
assert_eq!(
Solution::find_median_sorted_arrays(vec![1, 3], vec![2]), 2.0);
assert_eq!(
Solution::find_median_sorted_arrays(vec![1, 2], vec![3, 4]), 2.5);
}
} | (median1 + median2) as f64 / 2.0 |
async_twisted_client_serial.py | #!/usr/bin/env python
"""
Pymodbus Asynchronous Client Examples
--------------------------------------------------------------------------
The following is an example of how to use the asynchronous serial modbus
client implementation from pymodbus with twisted.
"""
from twisted.internet import reactor
from pymodbus.client.asynchronous import schedulers
from pymodbus.client.asynchronous.serial import AsyncModbusSerialClient
from pymodbus.client.asynchronous.twisted import ModbusClientProtocol
import logging
logging.basicConfig()
log = logging.getLogger("pymodbus")
log.setLevel(logging.DEBUG)
# ---------------------------------------------------------------------------#
# state a few constants
# ---------------------------------------------------------------------------#
SERIAL_PORT = "/dev/ptyp0"
STATUS_REGS = (1, 2)
STATUS_COILS = (1, 3)
CLIENT_DELAY = 1
UNIT = 0x01
class ExampleProtocol(ModbusClientProtocol):
def __init__(self, framer):
""" Initializes our custom protocol
:param framer: The decoder to use to process messages
:param endpoint: The endpoint to send results to
"""
ModbusClientProtocol.__init__(self, framer)
log.debug("Beginning the processing loop")
reactor.callLater(CLIENT_DELAY, self.fetch_holding_registers)
def fetch_holding_registers(self):
""" Defer fetching holding registers
"""
log.debug("Starting the next cycle")
d = self.read_holding_registers(*STATUS_REGS, unit=UNIT)
d.addCallbacks(self.send_holding_registers, self.error_handler)
def send_holding_registers(self, response):
""" Write values of holding registers, defer fetching coils
:param response: The response to process
"""
log.info(response.getRegister(0))
log.info(response.getRegister(1))
d = self.read_coils(*STATUS_COILS, unit=UNIT)
d.addCallbacks(self.start_next_cycle, self.error_handler)
def start_next_cycle(self, response):
""" Write values of coils, trigger next cycle
:param response: The response to process
"""
log.info(response.getBit(0))
log.info(response.getBit(1))
log.info(response.getBit(2))
reactor.callLater(CLIENT_DELAY, self.fetch_holding_registers)
def error_handler(self, failure):
""" Handle any twisted errors
:param failure: The error to handle
"""
log.error(failure)
if __name__ == "__main__":
proto, client = AsyncModbusSerialClient(schedulers.REACTOR,
method="rtu", | timeout=2,
proto_cls=ExampleProtocol)
proto.start()
# proto.stop() | port=SERIAL_PORT, |
day10.rs | use std::io::Read;
use crate::common::from_lines;
use crate::Solution;
#[derive(Default)]
pub struct Day10;
impl Solution for Day10 {
fn part1(&mut self, input: &mut dyn Read) -> String |
fn part2(&mut self, input: &mut dyn Read) -> String {
let mut adapters: Vec<u32> = from_lines(input);
adapters.push(0);
adapters.sort_unstable();
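// methods[i] = number of distinct arrangements that reach adapters[i] from the outlet at index 0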
let mut methods = vec![0u64; adapters.len()];
methods[0] = 1;
for (i, a) in adapters.iter().copied().enumerate() {
let c = methods[i];
for (j, b) in adapters[i..].iter().enumerate().skip(1) {
if b - a <= 3 {
methods[i + j] += c;
} else {
break;
}
}
}
methods.last().unwrap().to_string()
}
}
#[cfg(test)]
mod tests {
use crate::test_implementation;
use super::*;
const SAMPLE: &[u8] = include_bytes!("../samples/10.txt");
const SAMPLE2: &[u8] = include_bytes!("../samples/10.2.txt");
#[test]
fn sample_part1() {
test_implementation(Day10, 1, SAMPLE, 35);
test_implementation(Day10, 1, SAMPLE2, 220);
}
#[test]
fn sample_part2() {
test_implementation(Day10, 2, SAMPLE, 8);
test_implementation(Day10, 2, SAMPLE2, 19208);
}
}
| {
let mut adapters: Vec<u32> = from_lines(input);
// Outlet
adapters.push(0);
adapters.sort_unstable();
let device = *adapters.last().unwrap() + 3;
adapters.push(device);
let mut differences = [0u32; 4];
for window in adapters.windows(2) {
differences[(window[1] - window[0]) as usize] += 1;
}
(differences[1] * differences[3]).to_string()
} |
b0xfile_assets_yc_ml_translate.png.go | // Code generaTed by fileb0x at "2020-09-25 22:49:51.539473 +0300 MSK m=+1.346037030" from config file "b0x.yml" DO NOT EDIT.
// modified(2020-09-25 22:44:26 +0300 MSK)
// original path: ../../assets/yc/ml/translate.png
package assets
import (
"bytes"
"compress/gzip"
"io"
"os"
)
// FileAssetsYcMlTranslatePng is "assets/yc/ml/translate.png"
// FileAssetsYcMlTranslatePng holds the gzip-compressed bytes of assets/yc/ml/translate.png.
// The generated binary literal is elided here; only the gzip header prefix is kept.
var FileAssetsYcMlTranslatePng = []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff" /* ... remaining gzipped PNG bytes elided ... */)
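// init decompresses the embedded bytes and writes them into the in-memory
// filesystem at assets/yc/ml/translate.png.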
func init() {
rb := bytes.NewReader(FileAssetsYcMlTranslatePng)
r, err := gzip.NewReader(rb)
if err != nil {
panic(err)
}
err = r.Close()
if err != nil {
panic(err)
}
f, err := FS.OpenFile(CTX, "assets/yc/ml/translate.png", os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0777)
if err != nil {
panic(err)
}
_, err = io.Copy(f, r)
if err != nil {
panic(err)
}
err = f.Close()
if err != nil {
panic(err)
}
}
|
arc1.rs | // arc1.rs
// Make this code compile by filling in a value for `shared_numbers` where the
// TODO comment is and create an initial binding for `child_numbers`
// somewhere. Try not to create any copies of the `numbers` Vec!
// Execute `rustlings hint arc1` for hints :)
// I AM DONE
use std::sync::Arc;
use std::thread;
fn main() {
let numbers: Vec<_> = (0..100u32).collect();
let shared_numbers = Arc::new(numbers); // TODO
let mut joinhandles = Vec::new();
for offset in 0..8 {
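        // Clone the Arc so this thread gets its own handle to the shared Vec
        // (a reference-count bump, not a copy of the data).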
        let child_numbers = shared_numbers.clone();
joinhandles.push(thread::spawn(move || {
let mut i = offset;
let mut sum = 0;
while i < child_numbers.len() {
sum += child_numbers[i];
                i += 8;
}
println!("Sum of offset {} is {}", offset, sum);
}));
}
for handle in joinhandles.into_iter() {
handle.join().unwrap();
}
}
ret.go | package http
import (
"encoding/json"
"net/http"
)
const (
// OK ok
OK = 0
// RequestErr request error
RequestErr = -400
// ServerErr server error
ServerErr = -500
)
// Ret ret.
type Ret struct {
Code int `json:"code"`
Data interface{} `json:"data,omitempty"`
}
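// writeJSON marshals data wrapped in a Ret envelope and writes it to w as JSON.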
func writeJSON(w http.ResponseWriter, code int, data interface{}) (err error) {
// write header
header := w.Header()
header["Content-Type"] = []string{"application/json; charset=utf-8"}
// write body
ret := Ret{
Code: code,
Data: data,
}
b, err := json.Marshal(ret)
	if err != nil {
		return
	}
	_, err = w.Write(b)
	return
}
common.js | var HtmlUtil = {
    /*1. HTML-encode (escape) using the browser's built-in converter*/
    htmlEncode:function (html){
        //1. First dynamically create a container element, e.g. a DIV
        var temp = document.createElement ("div");
        //2. Then assign the string to be converted to the element's innerText or textContent
        (temp.textContent != undefined ) ? (temp.textContent = html) : (temp.innerText = html);
        //3. Finally return the element's innerHTML, which is the HTML-encoded string
        var output = temp.innerHTML;
        temp = null;
        return output;
    },
    /*2. HTML-decode (unescape) using the browser's built-in converter*/
    htmlDecode:function (text){
        //1. First dynamically create a container element, e.g. a DIV
        var temp = document.createElement("div");
        //2. Then assign the string to be converted to the element's innerHTML (supported by IE, Firefox, and Chrome)
        temp.innerHTML = text;
        //3. Finally return the element's innerText or textContent, which is the HTML-decoded string
        var output = temp.innerText || temp.textContent;
        temp = null;
        return output;
    },
    /*3. HTML-encode (escape) with a regular expression*/
    htmlEncodeByRegExp:function (str){
        var temp = "";
        if(str.length == 0) return "";
        temp = str.replace(/&/g,"&amp;");
        temp = temp.replace(/</g,"&lt;");
        temp = temp.replace(/>/g,"&gt;");
        temp = temp.replace(/\s/g,"&nbsp;");
        temp = temp.replace(/\'/g,"&#39;");
        temp = temp.replace(/\"/g,"&quot;");
        return temp;
    },
    /*4. HTML-decode (unescape) with a regular expression*/
    htmlDecodeByRegExp:function (str){
        var temp = "";
        if(str.length == 0) return "";
        temp = str.replace(/&amp;/g,"&");
        temp = temp.replace(/&lt;/g,"<");
        temp = temp.replace(/&gt;/g,">");
        temp = temp.replace(/&nbsp;/g," ");
        temp = temp.replace(/&#39;/g,"\'");
        temp = temp.replace(/&quot;/g,"\"");
        return temp;
    },
    /*5. HTML-encode (escape) with a regular expression (alternative form)*/
    html2Escape:function(sHtml) {
        if(sHtml == undefined || sHtml == null || sHtml.length == 0) return "";
        return sHtml.replace(/[<>&"]/g,function(c){return {'<':'&lt;','>':'&gt;','&':'&amp;','"':'&quot;'}[c];});
    },
    /*6. HTML-decode (unescape) with a regular expression (alternative form)*/
escape2Html:function (str) {
if(str == undefined || str == null || str.length == 0) return "";
var arrEntities={'lt':'<','gt':'>','nbsp':' ','amp':'&','quot':'"'};
return str.replace(/&(lt|gt|nbsp|amp|quot);/ig,function(all,t){return arrEntities[t];});
}
};
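// getFormJson serializes the form with the given ID into a plain object mapping field names to values.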
function getFormJson(formID) {
var fields = $('#'+formID).serializeArray();
    var obj = {}; // declare an object to hold the form fields
$.each(fields, function (index, field) {
        obj[field.name] = field.value; // copy each field's name/value pair onto the object
})
return obj;
}
// Site-wide ajax loading indicator
(function ($) {
$(document).ajaxStart(function () {
var index = layer.load(1, {
            shade: [0.1, '#fff'] // white overlay at 0.1 opacity
});
});
$(document).ajaxStop(function () {
layer.closeAll('loading');
});
    // Session expired: shiro returned the login page, so redirect to /login
$.ajaxSetup({
complete: function (xhr, status,dataType) {
if('text/html;charset=UTF-8'==xhr.getResponseHeader('Content-Type')){
top.location.href = '/login';
}
}
});
})(jQuery);
lexer.rs | use crate::symbol::Symbol;
use crate::front::Span;
use crate::front::token::{Token, BinOp, Delim};
pub struct Lexer<'s> {
source: &'s [u8],
position: usize,
}
impl<'s> Lexer<'s> {
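    /// Creates a lexer over `source`, starting at the given byte `position`.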
    pub fn new(source: &'s [u8], position: usize) -> Lexer<'s> {
Lexer { source, position }
}
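    /// Skips whitespace and comments, then scans the next token, returning it with its source span.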
pub fn read_token(&mut self) -> (Token, Span) {
self.scan_whitespace_or_comment();
let low = self.position;
let token = if is_ident_start(self.current()) {
self.scan_ident_or_keyword()
} else if
is_digit(self.current()) ||
self.current() == Some(b'$') ||
(self.current() == Some(b'.') && is_digit(self.next_char()))
{
self.scan_real()
} else if [Some(b'"'), Some(b'\'')].contains(&self.current()) {
self.scan_string()
} else if is_operator(self.current()) {
self.scan_operator()
} else if let Some(c) = self.current() {
self.advance_byte();
Token::Unexpected(c)
} else {
Token::Eof
};
let high = self.position;
(token, Span { low: low, high: high })
}
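    /// Consumes whitespace, line comments (//...), and block comments (/* ... */).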
fn scan_whitespace_or_comment(&mut self) {
loop {
match self.current() {
Some(b' ') | Some(b'\t') | Some(b'\n') | Some(b'\r') => (),
Some(b'/') if self.next_char() == Some(b'/') => {
self.advance_byte();
self.advance_byte();
loop {
if let Some(b'\n') | None = self.current() {
break;
}
self.advance_byte();
}
}
Some(b'/') if self.next_char() == Some(b'*') => {
self.advance_byte();
self.advance_byte();
loop {
match self.current() {
None => break,
Some(b'*') if self.next_char() == Some(b'/') => {
self.advance_byte();
break;
}
_ => (),
}
self.advance_byte();
}
}
_ => break,
}
self.advance_byte();
}
}
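    /// Scans an identifier, returning a keyword token when the interned symbol is a keyword.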
fn scan_ident_or_keyword(&mut self) -> Token {
let source = &self.source[..];
let low = self.position;
self.advance_byte();
while is_ident_continue(self.current()) {
self.advance_byte();
}
let high = self.position;
let symbol = Symbol::intern(&source[..high - low]);
if symbol.is_keyword() {
Token::Keyword(symbol)
} else {
Token::Ident(symbol)
}
}
fn scan_real(&mut self) -> Token {
let source = &self.source[..];
let low = self.position;
let radix = match self.current() {
Some(b'$') => {
self.advance_byte();
16
}
_ => 10,
};
while self.current().map(|c| (c as char).is_digit(radix)).unwrap_or(false) {
self.advance_byte();
}
if
radix == 10 &&
self.current() == Some(b'.') &&
is_digit(self.next_char())
{
self.advance_byte();
while self.current().map(|c| (c as char).is_digit(radix)).unwrap_or(false) {
self.advance_byte();
}
}
let high = self.position;
let symbol = Symbol::intern(&source[..high - low]);
Token::Real(symbol)
}
fn scan_string(&mut self) -> Token {
let delim = self.current();
let source = &self.source[..];
let low = self.position;
self.advance_byte();
while self.current() != delim && self.current() != None {
self.advance_byte();
}
self.advance_byte();
let high = self.position;
let symbol = Symbol::intern(&source[..high - low]);
Token::String(symbol)
}
fn scan_operator(&mut self) -> Token {
match self.advance_byte() {
Some(b'(') => Token::OpenDelim(Delim::Paren),
Some(b')') => Token::CloseDelim(Delim::Paren),
Some(b'[') => Token::OpenDelim(Delim::Bracket),
Some(b']') => Token::CloseDelim(Delim::Bracket),
Some(b'{') => Token::OpenDelim(Delim::Brace),
Some(b'}') => Token::CloseDelim(Delim::Brace),
Some(b'<') => match self.current() {
Some(b'=') => { self.advance_byte(); Token::Le }
Some(b'<') => { self.advance_byte(); Token::Shl }
Some(b'>') => { self.advance_byte(); Token::LtGt }
_ => Token::Lt
}
Some(b'=') => match self.current() {
Some(b'=') => { self.advance_byte(); Token::EqEq }
_ => Token::Eq
},
Some(b'!') => match self.current() {
Some(b'=') => { self.advance_byte(); Token::Ne }
_ => Token::Bang
},
Some(b'>') => match self.current() {
Some(b'=') => { self.advance_byte(); Token::Ge }
Some(b'>') => { self.advance_byte(); Token::Shr }
_ => Token::Gt
},
Some(b'+') => self.scan_binop(BinOp::Plus),
Some(b'-') => self.scan_binop(BinOp::Minus),
Some(b'*') => self.scan_binop(BinOp::Star),
Some(b'/') => self.scan_binop(BinOp::Slash),
Some(b'&') => match self.current() {
Some(b'&') => { self.advance_byte(); Token::And }
_ => self.scan_binop(BinOp::Ampersand)
},
Some(b'|') => match self.current() {
Some(b'|') => { self.advance_byte(); Token::Or }
_ => self.scan_binop(BinOp::Pipe)
},
Some(b'^') => match self.current() {
Some(b'^') => { self.advance_byte(); Token::Xor }
_ => self.scan_binop(BinOp::Caret)
},
Some(b'~') => Token::Tilde,
Some(b'.') => Token::Dot,
Some(b',') => Token::Comma,
Some(b';') => Token::Semicolon,
Some(b':') => match self.current() {
Some(b'=') => { self.advance_byte(); Token::ColonEq }
_ => Token::Colon
},
Some(c) => Token::Unexpected(c),
None => Token::Eof,
}
}
fn scan_binop(&mut self, op: BinOp) -> Token {
if self.current() == Some(b'=') {
self.advance_byte();
Token::BinOpEq(op)
} else {
Token::BinOp(op)
}
}
fn advance_byte(&mut self) -> Option<u8> {
if let Some((&current, rest)) = self.source.split_first() {
self.source = rest;
self.position += 1;
Some(current)
} else {
None
}
}
fn current(&self) -> Option<u8> {
self.source.get(0).copied()
}
fn next_char(&self) -> Option<u8> {
self.source.get(1).copied()
}
}
fn is_ident_start(c: Option<u8>) -> bool {
(Some(b'a') <= c && c <= Some(b'z')) ||
(Some(b'A') <= c && c <= Some(b'Z')) ||
c == Some(b'_')
}
fn is_ident_continue(c: Option<u8>) -> bool {
is_ident_start(c) || is_digit(c)
}
fn is_digit(c: Option<u8>) -> bool {
Some(b'0') <= c && c <= Some(b'9')
}
fn is_operator(c: Option<u8>) -> bool {
[
Some(b'{'), Some(b'}'), Some(b'('), Some(b')'), Some(b'['), Some(b']'),
Some(b'.'), Some(b','), Some(b':'), Some(b';'),
Some(b'+'), Some(b'-'), Some(b'*'), Some(b'/'),
Some(b'|'), Some(b'&'), Some(b'^'), Some(b'~'),
Some(b'='), Some(b'<'), Some(b'>'),
Some(b'!'),
].contains(&c)
}
#[cfg(test)]
mod tests {
use super::*;
fn ident(id: &[u8]) -> Token {
Token::Ident(Symbol::intern(id))
}
fn keyword(id: &[u8]) -> Token {
let symbol = Symbol::intern(id);
assert!(symbol.is_keyword());
Token::Keyword(symbol)
}
fn real(real: &[u8]) -> Token {
Token::Real(Symbol::intern(real))
}
fn span(low: usize, high: usize) -> Span {
Span { low: low, high: high }
}
#[test]
fn spans() {
let mut lexer = Lexer::new(b"/* comment */ var foo; foo = 3", 0);
assert_eq!(lexer.read_token(), (keyword(b"var"), span(14, 17)));
assert_eq!(lexer.read_token(), (ident(b"foo"), span(18, 21)));
assert_eq!(lexer.read_token(), (Token::Semicolon, span(21, 22)));
assert_eq!(lexer.read_token(), (ident(b"foo"), span(23, 26)));
assert_eq!(lexer.read_token(), (Token::Eq, span(27, 28)));
assert_eq!(lexer.read_token(), (real(b"3"), span(29, 30)));
assert_eq!(lexer.read_token(), (Token::Eof, span(30, 30)));
}
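// An additional sketch test (not part of the original suite) exercising the two-character
// operators handled in `scan_operator` above.
#[test]
fn two_char_operators() {
let mut lexer = Lexer::new(b"<= := <>", 0);
assert_eq!(lexer.read_token(), (Token::Le, span(0, 2)));
assert_eq!(lexer.read_token(), (Token::ColonEq, span(3, 5)));
assert_eq!(lexer.read_token(), (Token::LtGt, span(6, 8)));
assert_eq!(lexer.read_token(), (Token::Eof, span(8, 8)));
}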
}
| new |
lib.rs | // Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: MIT OR Apache-2.0
//! Tough is a client library for [TUF repositories].
//!
//! This client adheres to [TUF version 1.0.0][spec], with the following exceptions:
//!
//! * Delegated roles (and TAP 3) are not yet supported.
//! * TAP 4 (multiple repository consensus) is not yet supported.
//!
//! [TUF repositories]: https://theupdateframework.github.io/
//! [spec]: https://github.com/theupdateframework/specification/blob/9f148556ca15da2ec5c022c8b3e6f99a028e5fe5/tuf-spec.md
//!
//! # Testing
//!
//! Unit tests are run in the usual manner: `cargo test`.
//! Integration tests require docker and are disabled by default behind a feature named `integ`.
//! To run all tests, including integration tests: `cargo test --all-features` or
//! `cargo test --features 'http,integ'`.
#![forbid(missing_debug_implementations, missing_copy_implementations)]
#![deny(rust_2018_idioms)]
// missing_docs is on its own line to make it easy to comment out when making changes.
#![deny(missing_docs)]
#![warn(clippy::pedantic)]
#![allow(
clippy::module_name_repetitions,
clippy::must_use_candidate,
clippy::missing_errors_doc
)]
mod cache;
mod datastore;
pub mod editor;
pub mod error;
mod fetch;
#[cfg(feature = "http")]
pub mod http;
mod io;
pub mod key_source;
pub mod schema;
pub mod sign;
mod target_name;
mod transport;
use crate::datastore::Datastore;
use crate::error::Result;
use crate::fetch::{fetch_max_size, fetch_sha256};
/// An HTTP transport that includes retries.
#[cfg(feature = "http")]
pub use crate::http::{HttpTransport, HttpTransportBuilder, RetryRead};
use crate::schema::{
DelegatedRole, Delegations, Role, RoleType, Root, Signed, Snapshot, Timestamp,
};
pub use crate::target_name::TargetName;
pub use crate::transport::{
DefaultTransport, FilesystemTransport, Transport, TransportError, TransportErrorKind,
};
use chrono::{DateTime, Utc};
use log::warn;
use percent_encoding::{utf8_percent_encode, AsciiSet, NON_ALPHANUMERIC};
use snafu::{ensure, OptionExt, ResultExt};
use std::collections::HashMap;
use std::fs::create_dir_all;
use std::io::Read;
use std::path::{Path, PathBuf};
use tempfile::NamedTempFile;
use url::Url;
/// Represents whether a Repository should fail to load when metadata is expired (`Safe`) or whether
/// it should ignore expired metadata (`Unsafe`). Only use `Unsafe` if you are sure you need it.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ExpirationEnforcement {
/// Expirations will be enforced. You MUST use this option to get TUF security guarantees.
Safe,
/// Expirations will not be enforced. This is available for certain offline use cases, does NOT
/// provide TUF security guarantees, and should only be used if you are sure that you need it.
Unsafe,
}
/// `ExpirationEnforcement` defaults to `Safe` mode.
impl Default for ExpirationEnforcement {
fn default() -> Self {
ExpirationEnforcement::Safe
}
}
impl From<bool> for ExpirationEnforcement {
fn from(b: bool) -> Self {
if b {
ExpirationEnforcement::Safe
} else {
ExpirationEnforcement::Unsafe
}
}
}
impl From<ExpirationEnforcement> for bool {
fn from(ee: ExpirationEnforcement) -> Self {
ee == ExpirationEnforcement::Safe
}
}
/// A builder for settings with which to load a [`Repository`]. Required settings are provided in
/// the [`RepositoryLoader::new`] function. Optional parameters can be added after calling new.
/// Finally, call [`RepositoryLoader::load`] to load the [`Repository`].
///
/// # Examples
///
/// ## Basic usage:
///
/// ```rust
/// # use std::fs::File;
/// # use std::path::PathBuf;
/// # use tough::RepositoryLoader;
/// # use url::Url;
/// # let dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("tests").join("data").join("tuf-reference-impl");
/// # let root = dir.join("metadata").join("1.root.json");
/// # let metadata_base_url = Url::from_file_path(dir.join("metadata")).unwrap();
/// # let targets_base_url = Url::from_file_path(dir.join("targets")).unwrap();
///
/// let repository = RepositoryLoader::new(
/// File::open(root).unwrap(),
/// metadata_base_url,
/// targets_base_url,
/// )
/// .load()
/// .unwrap();
///
/// ```
///
/// ## With optional settings:
///
/// ```rust
/// # use std::fs::File;
/// # use std::path::PathBuf;
/// # use tough::{RepositoryLoader, FilesystemTransport, ExpirationEnforcement};
/// # use url::Url;
/// # let dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("tests").join("data").join("tuf-reference-impl");
/// # let root = dir.join("metadata").join("1.root.json");
/// # let metadata_base_url = Url::from_file_path(dir.join("metadata")).unwrap();
/// # let targets_base_url = Url::from_file_path(dir.join("targets")).unwrap();
///
/// let repository = RepositoryLoader::new(
/// File::open(root).unwrap(),
/// metadata_base_url,
/// targets_base_url,
/// )
/// .transport(FilesystemTransport)
/// .expiration_enforcement(ExpirationEnforcement::Unsafe)
/// .load()
/// .unwrap();
///
/// ```
#[derive(Debug, Clone)]
pub struct RepositoryLoader<R>
where
R: Read,
{
root: R,
metadata_base_url: Url,
targets_base_url: Url,
transport: Option<Box<dyn Transport>>,
limits: Option<Limits>,
datastore: Option<PathBuf>,
expiration_enforcement: Option<ExpirationEnforcement>,
}
impl<R: Read> RepositoryLoader<R> {
/// Create a new `RepositoryLoader`.
///
/// `root` is a [`Read`]er for the trusted root metadata file, which you must ship with your
/// software using an out-of-band process. It should be a copy of the most recent root.json
/// from your repository. (It's okay if it becomes out of date later; the client establishes
/// trust up to the most recent root.json file.)
///
/// `metadata_base_url` and `targets_base_url` are the base URLs where the client can find
/// metadata (such as root.json) and targets (as listed in targets.json).
pub fn new(root: R, metadata_base_url: Url, targets_base_url: Url) -> Self {
Self {
root,
metadata_base_url,
targets_base_url,
transport: None,
limits: None,
datastore: None,
expiration_enforcement: None,
}
}
/// Load and verify TUF repository metadata.
pub fn load(self) -> Result<Repository> {
Repository::load(self)
}
/// Set the transport. If no transport has been set, [`DefaultTransport`] will be used.
pub fn transport<T: Transport + 'static>(mut self, transport: T) -> Self {
self.transport = Some(Box::new(transport));
self
}
/// Set the repository [`Limits`].
pub fn limits(mut self, limits: Limits) -> Self {
self.limits = Some(limits);
self
}
/// Set a `datastore` directory path. `datastore` is a directory on a persistent filesystem.
/// This directory's contents store the most recently fetched timestamp, snapshot, and targets
/// metadata files to detect version rollback attacks.
///
/// You may choose to provide a [`PathBuf`] to a directory on a persistent filesystem, which must
/// exist prior to calling [`RepositoryLoader::load`]. If no datastore is provided, a temporary
/// directory will be created and cleaned up for you.
pub fn datastore<P: Into<PathBuf>>(mut self, datastore: P) -> Self {
self.datastore = Some(datastore.into());
self
}
/// Set the [`ExpirationEnforcement`].
///
/// **CAUTION:** TUF metadata expiration dates, particularly `timestamp.json`, are designed to
/// limit a replay attack window. By setting `expiration_enforcement` to `Unsafe`, you are
/// disabling this feature of TUF. Use `Safe` unless you have a good reason to use `Unsafe`.
pub fn expiration_enforcement(mut self, exp: ExpirationEnforcement) -> Self {
self.expiration_enforcement = Some(exp);
self
}
}
/// Limits used when fetching repository metadata.
///
/// These limits are implemented to prevent endless data attacks. Clients must ensure these values
/// are set higher than what would reasonably be expected by a repository, but not so high that the
/// amount of data could interfere with the system.
///
/// `max_root_size` and `max_timestamp_size` are the maximum size for the `root.json` and
/// `timestamp.json` files, respectively, downloaded from the repository. These must be
/// sufficiently large such that future updates to your repository's key management strategy
/// will still be supported, but sufficiently small such that you are protected against an
/// endless data attack (defined by TUF as an attacker responding to clients with extremely
/// large files that interfere with the client's system).
///
/// The [`Default`] implementation sets the following values:
/// * `max_root_size`: 1 MiB
/// * `max_targets_size`: 10 MiB
/// * `max_timestamp_size`: 1 MiB
/// * `max_root_updates`: 1024
#[derive(Debug, Clone, Copy)]
pub struct Limits {
/// The maximum allowable size in bytes for downloaded root.json files.
pub max_root_size: u64,
/// The maximum allowable size in bytes for downloaded targets.json file **if** the size is not
/// listed in snapshots.json. This setting is ignored if the size of targets.json is in the
/// signed snapshots.json file.
pub max_targets_size: u64,
/// The maximum allowable size in bytes for the downloaded timestamp.json file.
pub max_timestamp_size: u64,
/// The maximum number of updates to root.json to download.
pub max_root_updates: u64,
}
impl Default for Limits {
fn default() -> Self {
Self {
max_root_size: 1024 * 1024, // 1 MiB
max_targets_size: 1024 * 1024 * 10, // 10 MiB
max_timestamp_size: 1024 * 1024, // 1 MiB
max_root_updates: 1024,
}
}
}
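// Usage sketch: since `Limits` has public fields and a `Default` impl, a caller can override a
// single limit with struct-update syntax, e.g.:
// let limits = Limits { max_targets_size: 100 * 1024 * 1024, ..Limits::default() };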
/// Use this enum to specify whether or not we should include a prefix in the target name when
/// saving a target.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub enum Prefix {
/// Do not prepend the target name when saving the target file, e.g. `my-target.txt`.
None,
/// Prepend the sha digest when saving the target file, e.g. `0123456789abcdef.my-target.txt`.
Digest,
}
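// Usage sketch (hypothetical `repo` and `name` values; see `Repository::save_target` below):
// repo.save_target(&name, "/tmp/targets", Prefix::Digest)?;
// // writes e.g. /tmp/targets/0123...ef.my-target.txt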
/// A TUF repository.
///
/// You can create a `Repository` using a [`RepositoryLoader`].
#[derive(Debug, Clone)]
pub struct Repository {
transport: Box<dyn Transport>,
consistent_snapshot: bool,
datastore: Datastore,
earliest_expiration: DateTime<Utc>,
earliest_expiration_role: RoleType,
root: Signed<Root>,
snapshot: Signed<Snapshot>,
timestamp: Signed<Timestamp>,
targets: Signed<crate::schema::Targets>,
limits: Limits,
metadata_base_url: Url,
targets_base_url: Url,
expiration_enforcement: ExpirationEnforcement,
}
impl Repository {
/// Load and verify TUF repository metadata using a [`RepositoryLoader`] for the settings.
fn load<R: Read>(loader: RepositoryLoader<R>) -> Result<Self> {
let datastore = Datastore::new(loader.datastore)?;
let transport = loader
.transport
.unwrap_or_else(|| Box::new(DefaultTransport::new()));
let limits = loader.limits.unwrap_or_default();
let expiration_enforcement = loader.expiration_enforcement.unwrap_or_default();
let metadata_base_url = parse_url(loader.metadata_base_url)?;
let targets_base_url = parse_url(loader.targets_base_url)?;
// 0. Load the trusted root metadata file + 1. Update the root metadata file
let root = load_root(
transport.as_ref(),
loader.root,
&datastore,
limits.max_root_size,
limits.max_root_updates,
&metadata_base_url,
expiration_enforcement,
)?;
// 2. Download the timestamp metadata file
let timestamp = load_timestamp(
transport.as_ref(),
&root,
&datastore,
limits.max_timestamp_size,
&metadata_base_url,
expiration_enforcement,
)?;
// 3. Download the snapshot metadata file
let snapshot = load_snapshot(
transport.as_ref(),
&root,
&timestamp,
&datastore,
&metadata_base_url,
expiration_enforcement,
)?;
// 4. Download the targets metadata file
let targets = load_targets(
transport.as_ref(),
&root,
&snapshot,
&datastore,
limits.max_targets_size,
&metadata_base_url,
expiration_enforcement,
)?;
let expires_iter = [
(root.signed.expires, RoleType::Root),
(timestamp.signed.expires, RoleType::Timestamp),
(snapshot.signed.expires, RoleType::Snapshot),
(targets.signed.expires, RoleType::Targets),
];
let (earliest_expiration, earliest_expiration_role) =
expires_iter.iter().min_by_key(|tup| tup.0).unwrap();
Ok(Self {
transport,
consistent_snapshot: root.signed.consistent_snapshot,
datastore,
earliest_expiration: *earliest_expiration,
earliest_expiration_role: *earliest_expiration_role,
root,
snapshot,
timestamp,
targets,
limits,
metadata_base_url,
targets_base_url,
expiration_enforcement,
})
}
/// Returns the list of targets present in the repository.
pub fn targets(&self) -> &Signed<crate::schema::Targets> {
&self.targets
}
/// Returns a reference to the signed root
pub fn root(&self) -> &Signed<Root> {
&self.root
}
/// Returns a reference to the signed snapshot
pub fn snapshot(&self) -> &Signed<Snapshot> {
&self.snapshot
}
/// Returns a reference to the signed timestamp
pub fn timestamp(&self) -> &Signed<Timestamp> {
&self.timestamp
}
/// Returns an iterator over all targets, including all target files delegated by targets.
pub fn all_targets(&self) -> impl Iterator + '_ {
self.targets.signed.targets_iter()
}
/// Fetches a target from the repository.
///
/// If the repository metadata is expired or there is an issue making the request, `Err` is
/// returned.
///
/// If the requested target is not listed in the repository metadata, `Ok(None)` is returned.
///
/// Otherwise, a reader is returned, which provides streaming access to the target contents
/// before its checksum is validated. If the maximum size is reached or there is a checksum
/// mismatch, the reader returns a [`std::io::Error`]. **Consumers of this library must not use
/// data from the reader if it returns an error.**
pub fn read_target(&self, name: &TargetName) -> Result<Option<impl Read + Send>> {
// Check for repository metadata expiration.
if self.expiration_enforcement == ExpirationEnforcement::Safe {
ensure!(
system_time(&self.datastore)? < self.earliest_expiration,
error::ExpiredMetadata {
role: self.earliest_expiration_role
}
);
}
// 5. Verify the desired target against its targets metadata.
//
// 5.1. If there is no targets metadata about this target, abort the update cycle and
// report that there is no such target.
//
// 5.2. Otherwise, download the target (up to the number of bytes specified in the targets
// metadata), and verify that its hashes match the targets metadata. (We download up to
// this number of bytes, because in some cases, the exact number is unknown. This may
// happen, for example, if an external program is used to compute the root hash of a tree
// of targets files, and this program does not provide the total size of all of these
// files.) If consistent snapshots are not used (see Section 7), then the filename used
// to download the target file is of the fixed form FILENAME.EXT (e.g., foobar.tar.gz).
// Otherwise, the filename is of the form HASH.FILENAME.EXT (e.g.,
// c14aeb4ac9f4a8fc0d83d12482b9197452f6adf3eb710e3b1e2b79e8d14cb681.foobar.tar.gz), where
// HASH is one of the hashes of the targets file listed in the targets metadata file
// found earlier in step 4. In either case, the client MUST write the file to
// non-volatile storage as FILENAME.EXT.
Ok(if let Ok(target) = self.targets.signed.find_target(name) {
let (sha256, file) = self.target_digest_and_filename(target, name);
Some(self.fetch_target(target, &sha256, file.as_str())?)
} else {
None
})
}
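// Usage sketch for `read_target` (hypothetical `repo` and `name` values):
// if let Some(mut reader) = repo.read_target(&name)? {
//     let mut buf = Vec::new();
//     std::io::copy(&mut reader, &mut buf)?;
//     // only use `buf` if the copy succeeded; an Err means a size or checksum failure
// }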
/// Fetches a target from the repository and saves it to `outdir`. Attempts to do this as safely
/// as possible by using `path_clean` to eliminate `../` path traversals from the target's
/// name. Ensures that the resulting filepath is in `outdir` or a child of `outdir`.
///
/// # Parameters
///
/// - `name`: the target name.
/// - `outdir`: the directory to save the target in.
/// - `prepend`: Whether or not to prepend the sha digest when saving the target file.
///
/// # Preconditions and Behavior
///
/// - `outdir` must exist. For safety we want to canonicalize the path before we join to it.
/// - Intermediate directories will be created in `outdir` with `create_dir_all`.
/// - Will error if path resolution produces a filepath outside of `outdir` or outside of a
/// delegated target's correct path of delegation.
///
pub fn save_target<P>(&self, name: &TargetName, outdir: P, prepend: Prefix) -> Result<()>
where
P: AsRef<Path>,
{
// Ensure the outdir exists then canonicalize the path.
let outdir = outdir.as_ref();
let outdir = outdir
.canonicalize()
.context(error::SaveTargetOutdirCanonicalize { path: outdir })?;
ensure!(outdir.is_dir(), error::SaveTargetOutdir { path: outdir });
if name.resolved() != name.raw() {
// Since target names with resolvable path segments are unusual and potentially unsafe,
// we warn the user that we have encountered them.
warn!(
"The target named '{}' had path segments that were resolved to produce the \
following name: {}",
name.raw(),
name.resolved()
);
}
let filename = match prepend {
Prefix::Digest => {
let target = self.targets.signed.find_target(name).with_context(|| {
error::CacheTargetMissing {
target_name: name.clone(),
}
})?;
let sha256 = target.hashes.sha256.clone().into_vec();
format!("{}.{}", hex::encode(sha256), name.resolved())
}
Prefix::None => name.resolved().to_owned(),
};
let resolved_filepath = outdir.join(filename);
// Find out what directory we will be writing the target file to.
let filepath_dir =
resolved_filepath
.parent()
.with_context(|| error::SaveTargetNoParent {
path: &resolved_filepath,
name: name.clone(),
})?;
// Make sure the filepath we are writing to is in or below outdir.
ensure!(
filepath_dir.starts_with(&outdir),
error::SaveTargetUnsafePath {
name: name.clone(),
outdir,
filepath: &resolved_filepath,
}
);
// Fetch and write the target using NamedTempFile for an atomic file creation.
let mut reader = self
.read_target(name)?
.with_context(|| error::SaveTargetNotFound { name: name.clone() })?;
create_dir_all(&filepath_dir).context(error::DirCreate {
path: &filepath_dir,
})?;
let mut f = NamedTempFile::new_in(&filepath_dir).context(error::NamedTempFileCreate {
path: &filepath_dir,
})?;
std::io::copy(&mut reader, &mut f).context(error::FileWrite { path: &f.path() })?;
f.persist(&resolved_filepath)
.context(error::NamedTempFilePersist {
path: resolved_filepath,
})?;
Ok(())
}
/// Return the named `DelegatedRole` if found.
pub fn delegated_role(&self, name: &str) -> Option<&DelegatedRole> {
self.targets.signed.delegated_role(name).ok()
}
}
/// The set of characters that will be escaped when converting a delegated role name into a
/// filename. This needs to at least include path traversal characters to prevent tough from writing
/// outside of its datastore.
///
/// In order to match the Python TUF implementation, we mimic the Python function
/// [urllib.parse.quote] (given a 'safe' parameter value of `""`) which follows RFC 3986 and states
///
/// > Replace special characters in string using the %xx escape. Letters, digits, and the characters
/// `_.-~` are never quoted.
///
/// [urllib.parse.quote]: https://docs.python.org/3/library/urllib.parse.html#url-quoting
const CHARACTERS_TO_ESCAPE: AsciiSet = NON_ALPHANUMERIC
.remove(b'_')
.remove(b'.')
.remove(b'-')
.remove(b'~');
/// Percent encode a potential filename to ensure it is safe and does not have path traversal
/// characters.
pub(crate) fn encode_filename<S: AsRef<str>>(name: S) -> String {
utf8_percent_encode(name.as_ref(), &CHARACTERS_TO_ESCAPE).to_string()
}
/// Ensures that system time has not stepped backward since it was last sampled
fn system_time(datastore: &Datastore) -> Result<DateTime<Utc>> {
let file = "latest_known_time.json";
// Get 'current' system time
let sys_time = Utc::now();
// Load the latest known system time, if it exists
if let Some(Ok(latest_known_time)) = datastore
.reader(file)?
.map(serde_json::from_reader::<_, DateTime<Utc>>)
{
// Make sure the sampled system time did not go back in time
ensure!(
sys_time >= latest_known_time,
error::SystemTimeSteppedBackward {
sys_time,
latest_known_time
}
);
}
// Store the latest known time: serialize it as an RFC3339 time string and write it to the
// datastore.
datastore.create(file, &sys_time)?;
Ok(sys_time)
}
/// TUF v1.0.16, 5.2.9, 5.3.3, 5.4.5, 5.5.4, The expiration timestamp in the `[metadata]` file MUST
/// be higher than the fixed update start time.
fn check_expired<T: Role>(datastore: &Datastore, role: &T) -> Result<()> {
ensure!(
system_time(datastore)? <= role.expires(),
error::ExpiredMetadata { role: T::TYPE }
);
Ok(())
}
/// Checks to see if the `Url` has a trailing slash and adds one if not. Without a trailing slash,
/// the last component of a `Url` is considered to be a file. `metadata_url` and `targets_url`
/// must refer to a base (i.e. directory), so we need them to end with a slash.
fn parse_url(url: Url) -> Result<Url> {
if url.as_str().ends_with('/') {
Ok(url)
} else {
let mut s = url.to_string();
s.push('/');
Url::parse(&s).context(error::ParseUrl { url: s })
}
}
/// Steps 0 and 1 of the client application, which load the current root metadata file based on a
/// trusted root metadata file.
fn load_root<R: Read>(
transport: &dyn Transport,
root: R,
datastore: &Datastore,
max_root_size: u64,
max_root_updates: u64,
metadata_base_url: &Url,
expiration_enforcement: ExpirationEnforcement,
) -> Result<Signed<Root>> {
// 0. Load the trusted root metadata file. We assume that a good, trusted copy of this file was
// shipped with the package manager or software updater using an out-of-band process. Note
// that the expiration of the trusted root metadata file does not matter, because we will
// attempt to update it in the next step.
let mut root: Signed<Root> =
serde_json::from_reader(root).context(error::ParseTrustedMetadata)?;
root.signed
.verify_role(&root)
.context(error::VerifyTrustedMetadata)?;
// Used in step 1.2
let original_root_version = root.signed.version.get();
// Used in step 1.9
let original_timestamp_keys = root
.signed
.keys(RoleType::Timestamp)
.cloned()
.collect::<Vec<_>>();
let original_snapshot_keys = root
.signed
.keys(RoleType::Snapshot)
.cloned()
.collect::<Vec<_>>();
// 1. Update the root metadata file. Since it may now be signed using entirely different keys,
// the client must somehow be able to establish a trusted line of continuity to the latest
// set of keys. To do so, the client MUST download intermediate root metadata files, until
// the latest available one is reached. Therefore, it MUST temporarily turn on consistent
// snapshots in order to download versioned root metadata files as described next.
loop {
// 1.1. Let N denote the version number of the trusted root metadata file.
//
// 1.2. Try downloading version N+1 of the root metadata file, up to some X number of bytes
// (because the size is unknown). The value for X is set by the authors of the
// application using TUF. For example, X may be tens of kilobytes. The filename used to
// download the root metadata file is of the fixed form VERSION_NUMBER.FILENAME.EXT
// (e.g., 42.root.json). If this file is not available, or we have downloaded more than Y
// number of root metadata files (because the exact number is as yet unknown), then go to
// step 1.8. The value for Y is set by the authors of the application using TUF. For
// example, Y may be 2^10.
ensure!(
root.signed.version.get() < original_root_version + max_root_updates,
error::MaxUpdatesExceeded { max_root_updates }
);
let path = format!("{}.root.json", root.signed.version.get() + 1);
match fetch_max_size(
transport,
metadata_base_url.join(&path).context(error::JoinUrl {
path,
url: metadata_base_url.clone(),
})?,
max_root_size,
"max_root_size argument",
) {
Err(_) => break, // If this file is not available, then go to step 1.8.
Ok(reader) => {
let new_root: Signed<Root> =
serde_json::from_reader(reader).context(error::ParseMetadata {
role: RoleType::Root,
})?;
// 1.3. Check signatures. Version N+1 of the root metadata file MUST have been
// signed by: (1) a threshold of keys specified in the trusted root metadata file
// (version N), and (2) a threshold of keys specified in the new root metadata
// file being validated (version N+1). If version N+1 is not signed as required,
// discard it, abort the update cycle, and report the signature failure. On the
// next update cycle, begin at step 0 and version N of the root metadata file.
root.signed
.verify_role(&new_root)
.context(error::VerifyMetadata {
role: RoleType::Root,
})?;
new_root
.signed
.verify_role(&new_root)
.context(error::VerifyMetadata {
role: RoleType::Root,
})?;
// 1.4. Check for a rollback attack. The version number of the trusted root
// metadata file (version N) must be less than or equal to the version number of
// the new root metadata file (version N+1). Effectively, this means checking
// that the version number signed in the new root metadata file is indeed N+1. If
// the version of the new root metadata file is less than the trusted metadata
// file, discard it, abort the update cycle, and report the rollback attack. On
// the next update cycle, begin at step 0 and version N of the root metadata
// file.
ensure!(
root.signed.version <= new_root.signed.version,
error::OlderMetadata {
role: RoleType::Root,
current_version: root.signed.version,
new_version: new_root.signed.version
}
);
// Off-spec: 1.4 specifies that the version number of the trusted root metadata
// file must be less than or equal to the version number of the new root metadata
// file. If they are equal, this will create an infinite loop, so we ignore the new
// root metadata file but do not report an error. This could only happen if the
// path we built above, referencing N+1, has a filename that doesn't match its
// contents, which would have to list version N.
if root.signed.version == new_root.signed.version {
break;
}
// 1.5. Note that the expiration of the new (intermediate) root metadata file does
// not matter yet, because we will check for it in step 1.8.
//
// 1.6. Set the trusted root metadata file to the new root metadata file.
//
// (This is where version N+1 becomes version N.)
root = new_root;
// 1.7. Repeat steps 1.1 to 1.7.
continue;
}
}
}
// TUF v1.0.16, 5.2.9. Check for a freeze attack. The expiration timestamp in the trusted root
// metadata file MUST be higher than the fixed update start time. If the trusted root metadata
// file has expired, abort the update cycle, report the potential freeze attack. On the next
// update cycle, begin at step 5.1 and version N of the root metadata file.
if expiration_enforcement == ExpirationEnforcement::Safe {
check_expired(datastore, &root.signed)?;
}
// 1.9. If the timestamp and / or snapshot keys have been rotated, then delete the trusted
// timestamp and snapshot metadata files. This is done in order to recover from fast-forward
// attacks after the repository has been compromised and recovered. A fast-forward attack
// happens when attackers arbitrarily increase the version numbers of: (1) the timestamp
// metadata, (2) the snapshot metadata, and / or (3) the targets, or a delegated targets,
// metadata file in the snapshot metadata.
if original_timestamp_keys
.iter()
.ne(root.signed.keys(RoleType::Timestamp))
|| original_snapshot_keys
.iter()
.ne(root.signed.keys(RoleType::Snapshot))
{
let r1 = datastore.remove("timestamp.json");
let r2 = datastore.remove("snapshot.json");
r1.and(r2)?;
}
// 1.10. Set whether consistent snapshots are used as per the trusted root metadata file (see
// Section 4.3).
//
// (This is done by checking the value of root.signed.consistent_snapshot throughout this
// library.)
Ok(root)
}
/// Step 2 of the client application, which loads the timestamp metadata file.
fn load_timestamp(
transport: &dyn Transport,
root: &Signed<Root>,
datastore: &Datastore,
max_timestamp_size: u64,
metadata_base_url: &Url,
expiration_enforcement: ExpirationEnforcement,
) -> Result<Signed<Timestamp>> {
// 2. Download the timestamp metadata file, up to Y number of bytes (because the size is
// unknown.) The value for Y is set by the authors of the application using TUF. For
// example, Y may be tens of kilobytes. The filename used to download the timestamp metadata
// file is of the fixed form FILENAME.EXT (e.g., timestamp.json).
let path = "timestamp.json";
let reader = fetch_max_size(
transport,
metadata_base_url.join(path).context(error::JoinUrl {
path,
url: metadata_base_url.clone(),
})?,
max_timestamp_size,
"max_timestamp_size argument",
)?;
let timestamp: Signed<Timestamp> =
serde_json::from_reader(reader).context(error::ParseMetadata {
role: RoleType::Timestamp,
})?;
// 2.1. Check signatures. The new timestamp metadata file must have been signed by a threshold
// of keys specified in the trusted root metadata file. If the new timestamp metadata file is
// not properly signed, discard it, abort the update cycle, and report the signature failure.
root.signed
.verify_role(&timestamp)
.context(error::VerifyMetadata {
role: RoleType::Timestamp,
})?;
// 2.2. Check for a rollback attack. The version number of the trusted timestamp metadata file,
// if any, must be less than or equal to the version number of the new timestamp metadata
// file. If the new timestamp metadata file is older than the trusted timestamp metadata
// file, discard it, abort the update cycle, and report the potential rollback attack.
if let Some(Ok(old_timestamp)) = datastore
.reader("timestamp.json")?
.map(serde_json::from_reader::<_, Signed<Timestamp>>)
{
if root.signed.verify_role(&old_timestamp).is_ok() {
ensure!(
old_timestamp.signed.version <= timestamp.signed.version,
error::OlderMetadata {
role: RoleType::Timestamp,
current_version: old_timestamp.signed.version,
new_version: timestamp.signed.version
}
);
}
}
// TUF v1.0.16, 5.3.3. Check for a freeze attack. The expiration timestamp in the new timestamp
// metadata file MUST be higher than the fixed update start time. If so, the new timestamp
// metadata file becomes the trusted timestamp metadata file. If the new timestamp metadata file
// has expired, discard it, abort the update cycle, and report the potential freeze attack.
if expiration_enforcement == ExpirationEnforcement::Safe {
check_expired(datastore, &timestamp.signed)?;
}
// Now that everything seems okay, write the timestamp file to the datastore.
datastore.create("timestamp.json", ×tamp)?;
Ok(timestamp)
}
/// Step 3 of the client application, which loads the snapshot metadata file.
fn load_snapshot(
transport: &dyn Transport,
root: &Signed<Root>,
timestamp: &Signed<Timestamp>,
datastore: &Datastore,
metadata_base_url: &Url,
expiration_enforcement: ExpirationEnforcement,
) -> Result<Signed<Snapshot>> {
// 3. Download snapshot metadata file, up to the number of bytes specified in the timestamp
// metadata file. If consistent snapshots are not used (see Section 7), then the filename
// used to download the snapshot metadata file is of the fixed form FILENAME.EXT (e.g.,
// snapshot.json). Otherwise, the filename is of the form VERSION_NUMBER.FILENAME.EXT (e.g.,
// 42.snapshot.json), where VERSION_NUMBER is the version number of the snapshot metadata
// file listed in the timestamp metadata file. In either case, the client MUST write the
// file to non-volatile storage as FILENAME.EXT.
let snapshot_meta = timestamp
.signed
.meta
.get("snapshot.json")
.context(error::MetaMissing {
file: "snapshot.json",
role: RoleType::Timestamp,
})?;
let path = if root.signed.consistent_snapshot {
format!("{}.snapshot.json", snapshot_meta.version)
} else {
"snapshot.json".to_owned()
};
let reader = fetch_sha256(
transport,
metadata_base_url.join(&path).context(error::JoinUrl {
path,
url: metadata_base_url.clone(),
})?,
snapshot_meta.length,
"timestamp.json",
&snapshot_meta.hashes.sha256,
)?;
let snapshot: Signed<Snapshot> =
serde_json::from_reader(reader).context(error::ParseMetadata {
role: RoleType::Snapshot,
})?;
// 3.1. Check against timestamp metadata. The hashes and version number of the new snapshot
// metadata file MUST match the hashes and version number listed in timestamp metadata. If
// hashes and version do not match, discard the new snapshot metadata, abort the update
// cycle, and report the failure.
//
// (We already checked the hash in `fetch_sha256` above.)
ensure!(
snapshot.signed.version == snapshot_meta.version,
error::VersionMismatch {
role: RoleType::Snapshot,
fetched: snapshot.signed.version,
expected: snapshot_meta.version
}
);
// 3.2. Check signatures. The new snapshot metadata file MUST have been signed by a threshold
// of keys specified in the trusted root metadata file. If the new snapshot metadata file is
// not signed as required, discard it, abort the update cycle, and report the signature
// failure.
root.signed
.verify_role(&snapshot)
.context(error::VerifyMetadata {
role: RoleType::Snapshot,
})?;
// 3.3. Check for a rollback attack.
//
// 3.3.1. Note that the trusted snapshot metadata file may be checked for authenticity, but its
// expiration does not matter for the following purposes.
if let Some(Ok(old_snapshot)) = datastore
.reader("snapshot.json")?
.map(serde_json::from_reader::<_, Signed<Snapshot>>)
{
// 3.3.2. The version number of the trusted snapshot metadata file, if any, MUST be less
// than or equal to the version number of the new snapshot metadata file. If the new
// snapshot metadata file is older than the trusted metadata file, discard it, abort the
// update cycle, and report the potential rollback attack.
if root.signed.verify_role(&old_snapshot).is_ok() {
ensure!(
old_snapshot.signed.version <= snapshot.signed.version,
error::OlderMetadata {
role: RoleType::Snapshot,
current_version: old_snapshot.signed.version,
new_version: snapshot.signed.version
}
);
// 3.3.3. The version number of the targets metadata file, and all delegated targets
// metadata files (if any), in the trusted snapshot metadata file, if any, MUST be
// less than or equal to its version number in the new snapshot metadata file.
// Furthermore, any targets metadata filename that was listed in the trusted snapshot
// metadata file, if any, MUST continue to be listed in the new snapshot metadata
// file. If any of these conditions are not met, discard the new snapshot metadata
// file, abort the update cycle, and report the failure.
if let Some(old_targets_meta) = old_snapshot.signed.meta.get("targets.json") {
let targets_meta =
snapshot
.signed
.meta
.get("targets.json")
.context(error::MetaMissing {
file: "targets.json",
role: RoleType::Snapshot,
})?;
ensure!(
old_targets_meta.version <= targets_meta.version,
error::OlderMetadata {
role: RoleType::Targets,
current_version: old_targets_meta.version,
new_version: targets_meta.version,
}
);
}
}
}
// TUF v1.0.16, 5.4.5. Check for a freeze attack. The expiration timestamp in the new snapshot
// metadata file MUST be higher than the fixed update start time. If so, the new snapshot
// metadata file becomes the trusted snapshot metadata file. If the new snapshot metadata file
// is expired, discard it, abort the update cycle, and report the potential freeze attack.
if expiration_enforcement == ExpirationEnforcement::Safe {
check_expired(datastore, &snapshot.signed)?;
}
// Now that everything seems okay, write the snapshot file to the datastore.
datastore.create("snapshot.json", &snapshot)?;
Ok(snapshot)
}
/// Step 4 of the client application, which loads the targets metadata file.
fn load_targets(
transport: &dyn Transport,
root: &Signed<Root>,
snapshot: &Signed<Snapshot>,
datastore: &Datastore,
max_targets_size: u64,
metadata_base_url: &Url,
expiration_enforcement: ExpirationEnforcement,
) -> Result<Signed<crate::schema::Targets>> {
// 4. Download the top-level targets metadata file, up to either the number of bytes specified
// in the snapshot metadata file, or some Z number of bytes. The value for Z is set by the
// authors of the application using TUF. For example, Z may be tens of kilobytes. If
// consistent snapshots are not used (see Section 7), then the filename used to download the
// targets metadata file is of the fixed form FILENAME.EXT (e.g., targets.json). Otherwise,
// the filename is of the form VERSION_NUMBER.FILENAME.EXT (e.g., 42.targets.json), where
// VERSION_NUMBER is the version number of the targets metadata file listed in the snapshot
// metadata file. In either case, the client MUST write the file to non-volatile storage as
// FILENAME.EXT.
let targets_meta = snapshot
.signed
.meta
.get("targets.json")
.context(error::MetaMissing {
file: "targets.json",
role: RoleType::Snapshot,
})?;
let path = if root.signed.consistent_snapshot {
format!("{}.targets.json", targets_meta.version)
} else {
"targets.json".to_owned()
};
let targets_url = metadata_base_url.join(&path).context(error::JoinUrl {
path,
url: metadata_base_url.clone(),
})?;
let (max_targets_size, specifier) = match targets_meta.length {
Some(length) => (length, "snapshot.json"),
None => (max_targets_size, "max_targets_size parameter"),
};
let reader = if let Some(hashes) = &targets_meta.hashes {
Box::new(fetch_sha256(
transport,
targets_url,
max_targets_size,
specifier,
&hashes.sha256,
)?) as Box<dyn Read>
} else {
Box::new(fetch_max_size(
transport,
targets_url,
max_targets_size,
specifier,
)?)
};
let mut targets: Signed<crate::schema::Targets> =
serde_json::from_reader(reader).context(error::ParseMetadata {
role: RoleType::Targets,
})?;
// 4.1. Check against snapshot metadata. The hashes (if any), and version number of the new
// targets metadata file MUST match the trusted snapshot metadata. This is done, in part, to
// prevent a mix-and-match attack by man-in-the-middle attackers. If the new targets metadata
// file does not match, discard it, abort the update cycle, and report the failure. | ensure!(
targets.signed.version == targets_meta.version,
error::VersionMismatch {
role: RoleType::Targets,
fetched: targets.signed.version,
expected: targets_meta.version
}
);
// 4.2. Check for an arbitrary software attack. The new targets metadata file MUST have been
// signed by a threshold of keys specified in the trusted root metadata file. If the new
// targets metadata file is not signed as required, discard it, abort the update cycle, and
// report the failure.
root.signed
.verify_role(&targets)
.context(error::VerifyMetadata {
role: RoleType::Targets,
})?;
// 4.3. Check for a rollback attack. The version number of the trusted targets metadata file,
// if any, MUST be less than or equal to the version number of the new targets metadata file.
// If the new targets metadata file is older than the trusted targets metadata file, discard
// it, abort the update cycle, and report the potential rollback attack.
if let Some(Ok(old_targets)) = datastore
.reader("targets.json")?
.map(serde_json::from_reader::<_, Signed<crate::schema::Targets>>)
{
if root.signed.verify_role(&old_targets).is_ok() {
ensure!(
old_targets.signed.version <= targets.signed.version,
error::OlderMetadata {
role: RoleType::Targets,
current_version: old_targets.signed.version,
new_version: targets.signed.version
}
);
}
}
// TUF v1.0.16, 5.5.4. Check for a freeze attack. The expiration timestamp in the new targets
// metadata file MUST be higher than the fixed update start time. If so, the new targets
// metadata file becomes the trusted targets metadata file. If the new targets metadata file is
// expired, discard it, abort the update cycle, and report the potential freeze attack.
if expiration_enforcement == ExpirationEnforcement::Safe {
check_expired(datastore, &targets.signed)?;
}
// Now that everything seems okay, write the targets file to the datastore.
datastore.create("targets.json", &targets)?;
// 4.5. Perform a preorder depth-first search for metadata about the desired target, beginning
// with the top-level targets role.
if let Some(delegations) = &mut targets.signed.delegations {
load_delegations(
transport,
snapshot,
root.signed.consistent_snapshot,
metadata_base_url,
max_targets_size,
delegations,
datastore,
)?;
}
// This validation can only be done from the top level targets.json role. This check verifies
// that each target's delegate hierarchy is a match (i.e. it's delegate ownership is valid).
targets.signed.validate().context(error::InvalidPath)?;
Ok(targets)
}
// Follow the paths of delegations starting with the top level targets.json delegation
fn load_delegations(
transport: &dyn Transport,
snapshot: &Signed<Snapshot>,
consistent_snapshot: bool,
metadata_base_url: &Url,
max_targets_size: u64,
delegation: &mut Delegations,
datastore: &Datastore,
) -> Result<()> {
let mut delegated_roles: HashMap<String, Option<Signed<crate::schema::Targets>>> =
HashMap::new();
for delegated_role in &delegation.roles {
// find the role file metadata
let role_meta = snapshot
.signed
.meta
.get(&format!("{}.json", &delegated_role.name))
.context(error::RoleNotInMeta {
name: delegated_role.name.clone(),
})?;
let path = if consistent_snapshot {
format!(
"{}.{}.json",
&role_meta.version,
encode_filename(&delegated_role.name)
)
} else {
format!("{}.json", encode_filename(&delegated_role.name))
};
let role_url = metadata_base_url.join(&path).context(error::JoinUrl {
path: path.clone(),
url: metadata_base_url.clone(),
})?;
let specifier = "max_targets_size parameter";
// load the role json file
let reader = Box::new(fetch_max_size(
transport,
role_url,
max_targets_size,
specifier,
)?);
// since each role is a targets, we load them as such
let role: Signed<crate::schema::Targets> =
serde_json::from_reader(reader).context(error::ParseMetadata {
role: RoleType::Targets,
})?;
// verify each role with the delegation
delegation
.verify_role(&role, &delegated_role.name)
.context(error::VerifyMetadata {
role: RoleType::Targets,
})?;
ensure!(
role.signed.version == role_meta.version,
error::VersionMismatch {
role: RoleType::Targets,
fetched: role.signed.version,
expected: role_meta.version
}
);
datastore.create(&path, &role)?;
delegated_roles.insert(delegated_role.name.clone(), Some(role));
}
// load all roles delegated by this role
for delegated_role in &mut delegation.roles {
delegated_role.targets = delegated_roles.remove(&delegated_role.name).context(
error::DelegatedRolesNotConsistent {
name: delegated_role.name.clone(),
},
)?;
if let Some(targets) = &mut delegated_role.targets {
if let Some(delegations) = &mut targets.signed.delegations {
load_delegations(
transport,
snapshot,
consistent_snapshot,
metadata_base_url,
max_targets_size,
delegations,
datastore,
)?;
}
}
}
Ok(())
}
#[cfg(test)]
mod tests {
use super::*;
// Check if a url with a trailing slash and one without trailing slash can both be parsed
#[test]
fn url_missing_trailing_slash() {
let parsed_url_without_trailing_slash =
parse_url(Url::parse("https://example.org/a/b/c").unwrap()).unwrap();
let parsed_url_with_trailing_slash =
parse_url(Url::parse("https://example.org/a/b/c/").unwrap()).unwrap();
assert_eq!(
parsed_url_without_trailing_slash,
parsed_url_with_trailing_slash
);
}
// Ensure that the `ExpirationEnforcement` traits are not changed by mistake.
#[test]
fn expiration_enforcement_traits() {
let enforce = true;
let safe: ExpirationEnforcement = enforce.into();
assert_eq!(safe, ExpirationEnforcement::Safe);
let not_enforce = false;
let not_safe: ExpirationEnforcement = not_enforce.into();
assert_eq!(not_safe, ExpirationEnforcement::Unsafe);
let enforcing: bool = ExpirationEnforcement::Safe.into();
assert!(enforcing);
let non_enforcing: bool = ExpirationEnforcement::Unsafe.into();
assert!(!non_enforcing);
let default = ExpirationEnforcement::default();
assert_eq!(default, ExpirationEnforcement::Safe);
}
#[test]
fn encode_filename_1() {
let input = "../a";
let expected = "..%2Fa";
let actual = encode_filename(input);
assert_eq!(expected, actual);
}
#[test]
fn encode_filename_2() {
let input = "";
let expected = "";
let actual = encode_filename(input);
assert_eq!(expected, actual);
}
#[test]
fn encode_filename_3() {
let input = ".";
let expected = ".";
let actual = encode_filename(input);
assert_eq!(expected, actual);
}
#[test]
fn encode_filename_4() {
let input = "/";
let expected = "%2F";
let actual = encode_filename(input);
assert_eq!(expected, actual);
}
#[test]
fn encode_filename_5() {
let input = "ö";
let expected = "%C3%B6";
let actual = encode_filename(input);
assert_eq!(expected, actual);
}
#[test]
fn encode_filename_6() {
let input = "!@#$%^&*()[]|\\~`'\";:.,><?/-_";
let expected =
"%21%40%23%24%25%5E%26%2A%28%29%5B%5D%7C%5C~%60%27%22%3B%3A.%2C%3E%3C%3F%2F-_";
let actual = encode_filename(input);
assert_eq!(expected, actual);
}
#[test]
fn encode_filename_7() {
let input = "../../strange/role/../name";
let expected = "..%2F..%2Fstrange%2Frole%2F..%2Fname";
let actual = encode_filename(input);
assert_eq!(expected, actual);
}
#[test]
fn encode_filename_8() {
let input = "../🍺/( ͡° ͜ʖ ͡°)";
let expected = "..%2F%F0%9F%8D%BA%2F%28%20%CD%A1%C2%B0%20%CD%9C%CA%96%20%CD%A1%C2%B0%29";
let actual = encode_filename(input);
assert_eq!(expected, actual);
}
#[test]
fn encode_filename_9() {
let input = "ᚩ os, ᚱ rad, ᚳ cen, ᚷ gyfu, ᚹ ƿynn, ᚻ hægl, ...";
let expected = "%E1%9A%A9%20os%2C%20%E1%9A%B1%20rad%2C%20%E1%9A%B3%20cen%2C%20%E1%9A%B7%20gyfu%2C%20%E1%9A%B9%20%C6%BFynn%2C%20%E1%9A%BB%20h%C3%A6gl%2C%20...";
let actual = encode_filename(input);
assert_eq!(expected, actual);
}
#[test]
fn encode_filename_10() {
let input = "../../path/like/dubious";
let expected = "..%2F..%2Fpath%2Flike%2Fdubious";
let actual = encode_filename(input);
assert_eq!(expected, actual);
}
#[test]
fn encode_filename_11() {
let input = "🍺/30";
let expected = "%F0%9F%8D%BA%2F30";
let actual = encode_filename(input);
assert_eq!(expected, actual);
}
} | //
// (We already checked the hash in `fetch_sha256` above.) |
bitstamp1.py | # -*- coding: utf-8 -*-
from ccxt.async.base.exchange import Exchange
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import NotSupported
class bitstamp1 (Exchange):
def describe(self):
return self.deep_extend(super(bitstamp1, self).describe(), {
'id': 'bitstamp1',
'name': 'Bitstamp v1',
'countries': 'GB',
'rateLimit': 1000,
'version': 'v1',
'hasCORS': True,
'urls': {
'logo': 'https://user-images.githubusercontent.com/1294454/27786377-8c8ab57e-5fe9-11e7-8ea4-2b05b6bcceec.jpg',
'api': 'https://www.bitstamp.net/api',
'www': 'https://www.bitstamp.net',
'doc': 'https://www.bitstamp.net/api',
},
'requiredCredentials': {
'apiKey': True,
'secret': True,
'uid': True,
},
'api': {
'public': {
'get': [
'ticker',
'ticker_hour',
'order_book',
'transactions',
'eur_usd',
],
},
'private': {
'post': [
'balance',
'user_transactions',
'open_orders',
'order_status',
'cancel_order',
'cancel_all_orders',
'buy',
'sell',
'bitcoin_deposit_address',
'unconfirmed_btc',
'ripple_withdrawal',
'ripple_address',
'withdrawal_requests',
'bitcoin_withdrawal',
],
},
},
'markets': {
'BTC/USD': {'id': 'btcusd', 'symbol': 'BTC/USD', 'base': 'BTC', 'quote': 'USD', 'maker': 0.0025, 'taker': 0.0025},
'BTC/EUR': {'id': 'btceur', 'symbol': 'BTC/EUR', 'base': 'BTC', 'quote': 'EUR', 'maker': 0.0025, 'taker': 0.0025},
'EUR/USD': {'id': 'eurusd', 'symbol': 'EUR/USD', 'base': 'EUR', 'quote': 'USD', 'maker': 0.0025, 'taker': 0.0025},
'XRP/USD': {'id': 'xrpusd', 'symbol': 'XRP/USD', 'base': 'XRP', 'quote': 'USD', 'maker': 0.0025, 'taker': 0.0025},
'XRP/EUR': {'id': 'xrpeur', 'symbol': 'XRP/EUR', 'base': 'XRP', 'quote': 'EUR', 'maker': 0.0025, 'taker': 0.0025},
'XRP/BTC': {'id': 'xrpbtc', 'symbol': 'XRP/BTC', 'base': 'XRP', 'quote': 'BTC', 'maker': 0.0025, 'taker': 0.0025},
'LTC/USD': {'id': 'ltcusd', 'symbol': 'LTC/USD', 'base': 'LTC', 'quote': 'USD', 'maker': 0.0025, 'taker': 0.0025},
'LTC/EUR': {'id': 'ltceur', 'symbol': 'LTC/EUR', 'base': 'LTC', 'quote': 'EUR', 'maker': 0.0025, 'taker': 0.0025},
'LTC/BTC': {'id': 'ltcbtc', 'symbol': 'LTC/BTC', 'base': 'LTC', 'quote': 'BTC', 'maker': 0.0025, 'taker': 0.0025},
'ETH/USD': {'id': 'ethusd', 'symbol': 'ETH/USD', 'base': 'ETH', 'quote': 'USD', 'maker': 0.0025, 'taker': 0.0025},
'ETH/EUR': {'id': 'etheur', 'symbol': 'ETH/EUR', 'base': 'ETH', 'quote': 'EUR', 'maker': 0.0025, 'taker': 0.0025},
'ETH/BTC': {'id': 'ethbtc', 'symbol': 'ETH/BTC', 'base': 'ETH', 'quote': 'BTC', 'maker': 0.0025, 'taker': 0.0025},
},
})
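# Usage sketch (assumes ccxt's standard async pattern; credential values are hypothetical):
#   exchange = bitstamp1({'apiKey': 'KEY', 'secret': 'SECRET', 'uid': 'UID'})
#   ticker = await exchange.fetch_ticker('BTC/USD')
#   print(ticker['last'])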
async def fetch_order_book(self, symbol, params={}):
if symbol != 'BTC/USD':
raise ExchangeError(self.id + ' ' + self.version + " fetchOrderBook doesn't support " + symbol + ', use it for BTC/USD only')
orderbook = await self.publicGetOrderBook(params)
timestamp = int(orderbook['timestamp']) * 1000
return self.parse_order_book(orderbook, timestamp)
async def fetch_ticker(self, symbol, params={}):
if symbol != 'BTC/USD':
raise ExchangeError(self.id + ' ' + self.version + " fetchTicker doesn't support " + symbol + ', use it for BTC/USD only')
ticker = await self.publicGetTicker(params)
timestamp = int(ticker['timestamp']) * 1000
vwap = float(ticker['vwap'])
baseVolume = float(ticker['volume'])
quoteVolume = baseVolume * vwap
return {
'symbol': symbol,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'high': float(ticker['high']),
'low': float(ticker['low']),
'bid': float(ticker['bid']),
'ask': float(ticker['ask']),
'vwap': vwap,
'open': float(ticker['open']),
'close': None,
'first': None,
'last': float(ticker['last']),
'change': None,
'percentage': None,
'average': None,
'baseVolume': baseVolume,
'quoteVolume': quoteVolume,
'info': ticker,
}
def parse_trade(self, trade, market=None):
timestamp = None
if 'date' in trade:
timestamp = int(trade['date']) * 1000
elif 'datetime' in trade:
# timestamp = self.parse8601(trade['datetime'])
timestamp = int(trade['datetime']) * 1000
side = 'buy' if (trade['type'] == 0) else 'sell'
order = None
if 'order_id' in trade:
order = str(trade['order_id'])
if 'currency_pair' in trade:
if trade['currency_pair'] in self.markets_by_id:
market = self.markets_by_id[trade['currency_pair']]
return {
'id': str(trade['tid']),
'info': trade,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'symbol': market['symbol'],
'order': order,
'type': None,
'side': side,
'price': float(trade['price']),
'amount': float(trade['amount']),
}
async def fetch_trades(self, symbol, since=None, limit=None, params={}):
if symbol != 'BTC/USD':
raise ExchangeError(self.id + ' ' + self.version + " fetchTrades doesn't support " + symbol + ', use it for BTC/USD only')
market = self.market(symbol)
response = await self.publicGetTransactions(self.extend({
'time': 'minute',
}, params))
return self.parse_trades(response, market, since, limit)
async def fetch_balance(self, params={}):
balance = await self.privatePostBalance()
result = {'info': balance}
currencies = list(self.currencies.keys())
for i in range(0, len(currencies)):
currency = currencies[i]
lowercase = currency.lower()
total = lowercase + '_balance'
free = lowercase + '_available'
used = lowercase + '_reserved'
account = self.account()
account['free'] = self.safe_float(balance, free, 0.0)
account['used'] = self.safe_float(balance, used, 0.0)
account['total'] = self.safe_float(balance, total, 0.0)
result[currency] = account
return self.parse_balance(result)
async def create_order(self, symbol, type, side, amount, price=None, params={}):
if type != 'limit':
raise ExchangeError(self.id + ' ' + self.version + ' accepts limit orders only')
if symbol != 'BTC/USD':
raise ExchangeError(self.id + ' v1 supports BTC/USD orders only')
method = 'privatePost' + self.capitalize(side)
order = {
'amount': amount,
'price': price,
}
response = await getattr(self, method)(self.extend(order, params))
return {
'info': response,
'id': response['id'],
}
async def cancel_order(self, id, symbol=None, params={}):
return await self.privatePostCancelOrder({'id': id})
def parse_order_status(self, order):
if (order['status'] == 'Queue') or (order['status'] == 'Open'):
return 'open'
if order['status'] == 'Finished':
return 'closed'
return order['status']
async def fetch_order_status(self, id, symbol=None):
await self.load_markets()
response = await self.privatePostOrderStatus({'id': id})
return self.parse_order_status(response)
async def | (self, symbol=None, since=None, limit=None, params={}):
await self.load_markets()
market = None
if symbol:
market = self.market(symbol)
pair = market['id'] if market else 'all'
request = self.extend({'id': pair}, params)
response = await self.privatePostOpenOrdersId(request)
return self.parse_trades(response, market, since, limit)
async def fetch_order(self, id, symbol=None, params={}):
raise NotSupported(self.id + ' fetchOrder is not implemented yet')
def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
url = self.urls['api'] + '/' + self.implode_params(path, params)
query = self.omit(params, self.extract_params(path))
if api == 'public':
if query:
url += '?' + self.urlencode(query)
else:
self.check_required_credentials()
nonce = str(self.nonce())
auth = nonce + self.uid + self.apiKey
signature = self.encode(self.hmac(self.encode(auth), self.encode(self.secret)))
query = self.extend({
'key': self.apiKey,
'signature': signature.upper(),
'nonce': nonce,
}, query)
body = self.urlencode(query)
headers = {
'Content-Type': 'application/x-www-form-urlencoded',
}
return {'url': url, 'method': method, 'body': body, 'headers': headers}
async def request(self, path, api='public', method='GET', params={}, headers=None, body=None):
response = await self.fetch2(path, api, method, params, headers, body)
if 'status' in response:
if response['status'] == 'error':
raise ExchangeError(self.id + ' ' + self.json(response))
return response
| fetch_my_trades |
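The private-call signing above follows the common exchange v1 recipe: concatenate nonce, customer uid, and API key, HMAC the string with the secret, and send the uppercase hex digest alongside the key and nonce. A minimal standalone sketch in Python, assuming HMAC-SHA256 (the library default) and a millisecond nonce; both details are assumptions here, so treat the exchange's own docs as authoritative:

import hashlib
import hmac
import time

def sign_request(uid, api_key, secret, params):
    # The nonce must be strictly increasing per API key; milliseconds suffice.
    nonce = str(int(time.time() * 1000))
    auth = nonce + uid + api_key
    digest = hmac.new(secret.encode(), auth.encode(), hashlib.sha256).hexdigest()
    return dict(params, key=api_key, signature=digest.upper(), nonce=nonce)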
08-atoi.go | func ternary(cond bool, t,f int) int {
if cond {
return t
}
return f
}
// o(n) time and o(1) space
func | (str string) int {
	str = strings.TrimSpace(str) // remove leading and trailing whitespace
res := 0
if len(str) > 0 {
sign := str[0] // get the sign if it exists
	// note: compare bytes against '0'..'9' directly; str[index]-'0' is an
	// unsigned byte, so a ">= 0" check on it is always true
	for index := ternary(sign == '+' || sign == '-', 1, 0);
		index < len(str) && str[index] >= '0' && str[index] <= '9' && res < math.MaxInt32+1;
		index++ {
		digit := int(str[index] - '0')
		if !(res == 0 && digit == 0) { // skip leading zeroes
res = 10 * res + digit
}
}
if sign == '-' {
res = -res
}
}
if res > math.MaxInt32 {
return math.MaxInt32
} else if res < math.MinInt32 {
return math.MinInt32
}
return res
} | myAtoi |
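As a cross-check on the clamping behaviour, here is an illustrative Python rendition of the same routine, assuming the usual 32-bit bounds; unlike the Go version it accumulates freely and clamps once at the end:

INT_MAX, INT_MIN = 2**31 - 1, -(2**31)

def my_atoi(s: str) -> int:
    s = s.strip()
    if not s:
        return 0
    sign, i = 1, 0
    if s[0] in "+-":
        sign, i = (-1 if s[0] == "-" else 1), 1
    res = 0
    while i < len(s) and "0" <= s[i] <= "9":
        res = res * 10 + int(s[i])
        i += 1
    return max(INT_MIN, min(INT_MAX, sign * res))

assert my_atoi("  -2147483649") == INT_MIN  # overflow clamps to MinInt32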
util.go | package util
import (
"math"
s3mfile "github.com/gotracker/goaudiofile/music/tracked/s3m"
"github.com/gotracker/gomixing/panning"
"github.com/gotracker/gomixing/volume"
"gotracker/internal/song/note"
)
const (
floatDefaultC2Spd = float32(s3mfile.DefaultC2Spd)
c2Period = float32(1712)
// S3MBaseClock is the base clock speed of S3M files
S3MBaseClock = floatDefaultC2Spd * c2Period
)
var (
// DefaultVolume is the default volume value for most everything in S3M format
DefaultVolume = VolumeFromS3M(s3mfile.DefaultVolume)
| // DefaultPanning is the default panning value for unconfigured channels
DefaultPanning = PanningFromS3M(0x08)
// DefaultPanningRight is the default panning value for right channels
DefaultPanningRight = PanningFromS3M(0x0C)
)
var semitonePeriodTable = [...]float32{27392, 25856, 24384, 23040, 21696, 20480, 19328, 18240, 17216, 16256, 15360, 14496}
// CalcSemitonePeriod calculates the semitone period for S3M notes
func CalcSemitonePeriod(semi note.Semitone, ft note.Finetune, c2spd note.C2SPD) note.Period {
if semi == note.UnchangedSemitone {
panic("how?")
}
key := int(semi.Key())
octave := int(semi.Octave())
if key >= len(semitonePeriodTable) {
return nil
}
if c2spd == 0 {
c2spd = note.C2SPD(s3mfile.DefaultC2Spd)
}
if ft != 0 {
c2spd = calcFinetuneC2Spd(c2spd, ft)
}
period := (AmigaPeriod(floatDefaultC2Spd*semitonePeriodTable[key]) / AmigaPeriod(uint32(c2spd)<<octave))
period = period.AddInteger(0)
return &period
}
// calcFinetuneC2Spd calculates a new C2SPD after a finetune adjustment
func calcFinetuneC2Spd(c2spd note.C2SPD, finetune note.Finetune) note.C2SPD {
if finetune == 0 {
return c2spd
}
o := 5
st := note.Semitone(o * 12) // C-5
stShift := int8(finetune / 64)
if stShift >= 0 {
st += note.Semitone(stShift)
} else {
st -= note.Semitone(-stShift)
}
period0 := CalcSemitonePeriod(st, 0, c2spd)
period1 := CalcSemitonePeriod(st+1, 0, c2spd)
fFt := float64(finetune) / 64
iFt := math.Trunc(fFt)
f := fFt - iFt
period := period0.Lerp(f, period1)
return note.C2SPD(period.GetFrequency())
}
// VolumeFromS3M converts an S3M volume to a player volume
func VolumeFromS3M(vol s3mfile.Volume) volume.Volume {
var v volume.Volume
switch {
case vol == s3mfile.EmptyVolume:
v = volume.VolumeUseInstVol
case vol >= 63:
v = volume.Volume(63.0) / 64.0
	default:
		v = volume.Volume(vol) / 64.0
}
return v
}
// VolumeToS3M converts a player volume to an S3M volume
func VolumeToS3M(v volume.Volume) s3mfile.Volume {
switch {
case v == volume.VolumeUseInstVol:
return s3mfile.EmptyVolume
default:
return s3mfile.Volume(v * 64.0)
}
}
// VolumeFromS3M8BitSample converts an S3M 8-bit sample volume to a player volume
func VolumeFromS3M8BitSample(vol uint8) volume.Volume {
return (volume.Volume(vol) - 128.0) / 128.0
}
// VolumeFromS3M16BitSample converts an S3M 16-bit sample volume to a player volume
func VolumeFromS3M16BitSample(vol uint16) volume.Volume {
return (volume.Volume(vol) - 32768.0) / 32768.0
}
// PanningFromS3M returns a radian panning position from an S3M panning value
func PanningFromS3M(pos uint8) panning.Position {
return panning.MakeStereoPosition(float32(pos), 0, 0x0F)
}
// NoteFromS3MNote converts an S3M file note into a player note
func NoteFromS3MNote(sn s3mfile.Note) note.Note {
switch {
case sn == s3mfile.EmptyNote:
return note.EmptyNote{}
case sn == s3mfile.StopNote:
return note.StopOrReleaseNote{}
default:
k := uint8(sn.Key()) & 0x0f
o := uint8(sn.Octave()) & 0x0f
if k < 12 && o < 10 {
s := note.Semitone(o*12 + k)
return note.Normal(s)
}
}
return note.InvalidNote{}
}
// FrequencyFromSemitone returns the frequency from the semitone (and c2spd)
func FrequencyFromSemitone(semitone note.Semitone, c2spd note.C2SPD) float32 {
period := CalcSemitonePeriod(semitone, 0, c2spd)
return float32(period.GetFrequency())
} | // DefaultPanningLeft is the default panning value for left channels
DefaultPanningLeft = PanningFromS3M(0x03) |
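The period math above is the classic ST3 formula: the table stores octave-0 periods (27392 = 16 * 1712 for C), each octave halves the period, and a sample's C2SPD rescales it. A small numeric sketch, assuming the standard S3M default C2SPD of 8363 Hz (the conventional value behind s3mfile.DefaultC2Spd):

DEFAULT_C2SPD = 8363  # assumed: the conventional S3M default rate
SEMITONE_PERIODS = [27392, 25856, 24384, 23040, 21696, 20480,
                    19328, 18240, 17216, 16256, 15360, 14496]
S3M_BASE_CLOCK = DEFAULT_C2SPD * 1712

def semitone_period(semitone, c2spd=DEFAULT_C2SPD):
    key, octave = semitone % 12, semitone // 12
    return DEFAULT_C2SPD * SEMITONE_PERIODS[key] / (c2spd << octave)

def frequency(semitone, c2spd=DEFAULT_C2SPD):
    return S3M_BASE_CLOCK / semitone_period(semitone, c2spd)

print(semitone_period(4 * 12))  # C-4 -> 1712.0
print(frequency(4 * 12))        # -> 8363.0, the C2SPD itself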
display_tsv.go | // Copyright (c) 2019 Intel Corporation
//
// SPDX-License-Identifier: Apache-2.0
//
package main
import (
"encoding/csv"
"os"
)
type displayTSV struct {
writer *csv.Writer
disableHeader bool
}
func | (file *os.File, separator rune, disableHeader bool) DisplayHandler {
tsv := &displayTSV{
disableHeader: disableHeader,
}
tsv.writer = csv.NewWriter(file)
tsv.writer.Comma = separator
return tsv
}
func (d *displayTSV) DisplayLinks(doc *Doc) error {
if !d.disableHeader {
record := linkHeaderRecord()
if err := d.writer.Write(record); err != nil {
return err
}
}
for _, linkList := range doc.Links {
for _, link := range linkList {
record := linkToRecord(link)
if err := d.writer.Write(record); err != nil {
return err
}
}
}
d.writer.Flush()
return d.writer.Error()
}
func (d *displayTSV) DisplayHeadings(doc *Doc) error {
if !d.disableHeader {
record := headingHeaderRecord()
if err := d.writer.Write(record); err != nil {
return err
}
}
for _, l := range doc.Headings {
record := headingToRecord(l)
if err := d.writer.Write(record); err != nil {
return err
}
}
d.writer.Flush()
return d.writer.Error()
}
| NewDisplayTSV |
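The handler above is plain encoding/csv with a custom Comma rune; switching the separator is the entire difference between CSV and TSV output. For comparison, the same idea in Python's csv module (the field names are illustrative):

import csv
import sys

writer = csv.writer(sys.stdout, delimiter="\t")
writer.writerow(["address", "description", "line"])  # optional header row
writer.writerow(["https://example.com", "external link", "42"])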
recommendationCounter.tsx | import React, { FC } from 'react'
import { RecommendationModel } from '@/common/types/release'
type RecommendationCounterProps = {
recommendation: RecommendationModel
}
const RecommendationCounter: FC<RecommendationCounterProps> = React.memo(
({ recommendation }): JSX.Element => {
return (
<div className="recommendation-counter"> | <span className="audio">
<i className="material-icons">headset</i>
{recommendation?.audio?.length}
</span>
<span className="text">
<i className="material-icons">article</i>
{recommendation?.text?.length}
</span>
</div>
)
},
)
export default RecommendationCounter | <span className="video">
<i className="material-icons">video_library</i>
{recommendation?.video?.length}
</span> |
correlation-id.ts | import {NextFunction, Request, Response} from "express";
import {v4} from "uuid";
import {CORRELATION_ID_HEADER_NAME} from "./constants";
export const CorrelationId = (req: Request, res: Response, next: NextFunction) => {
if (req.header(CORRELATION_ID_HEADER_NAME) == null) {
req.headers[CORRELATION_ID_HEADER_NAME] = v4(); | next();
}; | }
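The middleware's contract: reuse an incoming correlation id, otherwise mint one before handing off to next(), so an id assigned at the edge survives across services. A framework-agnostic Python sketch of the same guard; the header name below is hypothetical, since the real one lives in ./constants:

import uuid

CORRELATION_ID_HEADER = "x-correlation-id"  # hypothetical; see ./constants

def ensure_correlation_id(headers: dict) -> dict:
    # Only mint a fresh UUIDv4 when the caller did not supply an id.
    if not headers.get(CORRELATION_ID_HEADER):
        headers[CORRELATION_ID_HEADER] = str(uuid.uuid4())
    return headers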
|
__init__.py | ###
# Copyright (c) 2005, Jeremiah Fincher
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###
"""
A plugin for time-related functions.
"""
import supybot
import supybot.world as world |
__author__ = supybot.authors.jemfinch
# This is a dictionary mapping supybot.Author instances to lists of
# contributions.
__contributors__ = {}
import config
import plugin
reload(plugin) # In case we're being reloaded.
# Add more reloads here if you add third-party modules and want them to be
# reloaded when this plugin is reloaded. Don't forget to import them as well!
if world.testing:
import test
Class = plugin.Class
configure = config.configure
# vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79: |
# Use this for the version of this plugin. You may wish to put a CVS keyword
# in here if you're keeping the plugin in CVS or some similar system.
__version__ = "%%VERSION%%" |
mongoose.ts | import { ChildLogger } from "./logger";
import { MONGODB_URI } from "./secrets";
import { Connection, ConnectionOptions, Mongoose, Types, STATES } from "mongoose";
import Bluebird from "bluebird";
const logger = ChildLogger(__filename);
export function toObjectId(id: string | Types.ObjectId): undefined | Types.ObjectId {
if (!id || "" === id) {
return undefined;
}
const stringId = id.toString().toLowerCase();
logger.log({
level: "debug",
message: `stringId::${stringId}`, | if (!Types.ObjectId.isValid(stringId)) {
return undefined;
}
const result = new Types.ObjectId(stringId);
if (result.toString() != stringId) {
return undefined;
}
return result;
}
class MongooseConnection {
private readonly mongo: Mongoose;
private readonly _options: ConnectionOptions;
private readonly _uri: string;
constructor() {
this._options = {
useUnifiedTopology: true,
useNewUrlParser: true,
useCreateIndex: true,
useFindAndModify: false
};
this.mongo = new Mongoose();
this.mongo.Promise = Bluebird;
this._uri = MONGODB_URI;
this.mongo.connection
.on(STATES[STATES.connected], () => {
logger.log({
level: "info",
message: "Database connection OPEN",
FN: "constructor"
});
})
.on(STATES[STATES.disconnected], function () {
logger.log({
level: "info",
message: "Database connection CLOSED",
FN: "constructor"
});
});
}
connect(): Promise<Mongoose> {
return this.mongo.connect(this._uri, this._options);
}
get connection(): Connection {
return this.mongo.connection;
}
get mongoose(): Mongoose {
return this.mongo;
}
get uri(): string {
return this._uri;
}
disconnect(): Promise<void> {
return this.mongo.disconnect();
}
}
export const mongooseConnection = new MongooseConnection(); | FN: "toObjectId"
});
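The round-trip comparison in toObjectId is the subtle part: isValid() also accepts any 12-byte string, so converting the parsed id back to a string and comparing rejects non-hex lookalikes. A sketch of the same guard in Python, assuming PyMongo's bson package is available:

from bson import ObjectId
from bson.errors import InvalidId

def to_object_id(value):
    s = str(value).lower()
    try:
        oid = ObjectId(s)
    except (InvalidId, TypeError):
        return None
    # Comparing the round-tripped hex form filters out inputs that parse
    # but are not canonical 24-character hex strings.
    return oid if str(oid) == s else None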
|
two_factor_disable.py | from django.core.management.base import BaseCommand, CommandError
try:
from django.contrib.auth import get_user_model
except ImportError:
from django.contrib.auth.models import User
else:
User = get_user_model()
from django_otp import devices_for_user
class Command(BaseCommand):
| """
Command for disabling two-factor authentication for certain users.
The command accepts any number of usernames, and will remove all OTP
devices for those users.
Example usage::
        manage.py two_factor_disable bouke steve
"""
args = '<username username ...>'
help = 'Disables two-factor authentication for the given users'
def handle(self, *args, **options):
for username in args:
try:
user = User.objects.get_by_natural_key(username)
except User.DoesNotExist:
raise CommandError('User "%s" does not exist' % username)
for device in devices_for_user(user):
device.delete() |
|
description.go | // Protocol Buffers for Go with Gadgets
//
// Copyright (c) 2013, The GoGo Authors. All rights reserved.
// http://github.com/gogo/protobuf
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
/*
The description (experimental) plugin generates a Description method for each message.
The Description method returns a populated google_protobuf.FileDescriptorSet struct.
This contains the description of the files used to generate this message.
It is enabled by the following extensions:
- description
- description_all
The description plugin also generates a test given it is enabled using one of the following extensions:
- testgen
- testgen_all
Let us look at:
github.com/gogo/protobuf/test/example/example.proto
Btw all the output can be seen at:
github.com/gogo/protobuf/test/example/*
The following message:
message B {
option (gogoproto.description) = true;
optional A A = 1 [(gogoproto.nullable) = false, (gogoproto.embed) = true];
repeated bytes G = 2 [(gogoproto.customtype) = "github.com/gogo/protobuf/test/custom.Uint128", (gogoproto.nullable) = false];
}
given to the description plugin, will generate the following code:
func (this *B) Description() (desc *google_protobuf.FileDescriptorSet) {
return ExampleDescription()
}
and the following test code:
	func TestDescription(t *testing.T) {
ExampleDescription()
}
The hope is to use this struct in some way instead of reflect.
This package is subject to change, since a use has not been figured out yet.
*/
package description
import (
"bytes"
"compress/gzip"
"fmt"
"github.com/gogo/protobuf/gogoproto"
"github.com/gogo/protobuf/proto"
descriptor "github.com/gogo/protobuf/protoc-gen-gogo/descriptor"
"github.com/gogo/protobuf/protoc-gen-gogo/generator"
)
type plugin struct {
*generator.Generator
generator.PluginImports
}
func NewPlugin() *plugin {
return &plugin{}
}
func (p *plugin) Name() string {
return "description"
}
func (p *plugin) Init(g *generator.Generator) {
p.Generator = g
}
func (p *plugin) Generate(file *generator.FileDescriptor) {
used := false
localName := generator.FileName(file)
p.PluginImports = generator.NewPluginImports(p.Generator)
descriptorPkg := p.NewImport("github.com/gogo/protobuf/protoc-gen-gogo/descriptor")
protoPkg := p.NewImport("github.com/gogo/protobuf/proto")
gzipPkg := p.NewImport("compress/gzip")
bytesPkg := p.NewImport("bytes")
ioutilPkg := p.NewImport("io/ioutil")
for _, message := range file.Messages() {
if !gogoproto.HasDescription(file.FileDescriptorProto, message.DescriptorProto) {
continue
}
if message.DescriptorProto.GetOptions().GetMapEntry() {
continue
}
used = true
ccTypeName := generator.CamelCaseSlice(message.TypeName())
p.P(`func (this *`, ccTypeName, `) Description() (desc *`, descriptorPkg.Use(), `.FileDescriptorSet) {`)
p.In()
p.P(`return `, localName, `Description()`)
p.Out()
p.P(`}`)
}
if used {
p.P(`func `, localName, `Description() (desc *`, descriptorPkg.Use(), `.FileDescriptorSet) {`)
p.In()
//Don't generate SourceCodeInfo, since it will create too much code.
ss := make([]*descriptor.SourceCodeInfo, 0)
for _, f := range p.Generator.AllFiles().GetFile() {
ss = append(ss, f.SourceCodeInfo)
f.SourceCodeInfo = nil
}
b, err := proto.Marshal(p.Generator.AllFiles())
if err != nil {
panic(err)
}
for i, f := range p.Generator.AllFiles().GetFile() {
f.SourceCodeInfo = ss[i]
}
p.P(`d := &`, descriptorPkg.Use(), `.FileDescriptorSet{}`)
var buf bytes.Buffer
w, _ := gzip.NewWriterLevel(&buf, gzip.BestCompression)
w.Write(b)
w.Close()
b = buf.Bytes()
p.P("var gzipped = []byte{")
p.In()
p.P("// ", len(b), " bytes of a gzipped FileDescriptorSet")
for len(b) > 0 {
n := 16
if n > len(b) {
n = len(b)
}
s := ""
for _, c := range b[:n] {
s += fmt.Sprintf("0x%02x,", c)
}
p.P(s)
b = b[n:]
}
p.Out()
p.P("}")
p.P(`r := `, bytesPkg.Use(), `.NewReader(gzipped)`)
p.P(`gzipr, err := `, gzipPkg.Use(), `.NewReader(r)`)
p.P(`if err != nil {`)
p.In()
p.P(`panic(err)`)
p.Out()
p.P(`}`)
p.P(`ungzipped, err := `, ioutilPkg.Use(), `.ReadAll(gzipr)`)
p.P(`if err != nil {`)
p.In()
p.P(`panic(err)`)
p.Out()
p.P(`}`)
p.P(`if err := `, protoPkg.Use(), `.Unmarshal(ungzipped, d); err != nil {`)
p.In()
p.P(`panic(err)`)
p.Out()
p.P(`}`)
p.P(`return d`)
p.Out()
p.P(`}`)
}
}
func init() | {
generator.RegisterPlugin(NewPlugin())
} |
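The generated Description() pattern is a compress-at-generation-time, decompress-at-runtime round trip: the marshalled FileDescriptorSet is gzipped into a byte literal, then gunzipped and unmarshalled on first use. The round trip in miniature, as a Python sketch with a placeholder payload:

import gzip

def embed(blob: bytes) -> bytes:
    # compresslevel=9 mirrors gzip.BestCompression used by the generator.
    return gzip.compress(blob, compresslevel=9)

def restore(gzipped: bytes) -> bytes:
    return gzip.decompress(gzipped)

raw = b"serialized FileDescriptorSet bytes"  # placeholder payload
assert restore(embed(raw)) == raw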
|
plot_helpers.py | import math
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
import matplotlib.transforms as mtransforms
from matplotlib.offsetbox import AnchoredText
def setup_axes(diff=False):
fig = plt.figure()
axes = []
if diff:
gs = gridspec.GridSpec(2, 1, height_ratios=[2,1])
main_axis = plt.subplot(gs[0])
axes.append(plt.subplot(gs[0]))
axes.append(plt.subplot(gs[1], sharex=main_axis))
else:
axes.append(plt.subplot())
return fig, axes
def layout_main_and_diff_axis(fig, axes):
main_axis, diff_axis = axes
fig.subplots_adjust(hspace=0.0)
main_axis.spines['bottom'].set_visible(False)
plt.setp(main_axis.get_xticklabels(), visible=False)
main_axis.set_xlabel('')
diff_axis.xaxis.tick_bottom()
def configure_legend_on_axis(axis, title='', loc='best', borderpad=1.2, draws_background=True):
legend = axis.legend(loc=loc,
title=title,
borderaxespad=borderpad,
framealpha=0.8,
frameon=draws_background,
fancybox=draws_background)
legend.get_frame().set_color((0.96,0.96,0.96))
for line in legend.get_lines():
line.set_alpha(1.0)
def add_annotation_on_axis(axis, annotation, loc='upper right', borderpad=1.2):
codes = {'upper right': 1, 'upper left': 2, 'lower left': 3, 'lower right': 4,
'right': 5, 'center left': 6,'center right': 7,
'lower center': 8, 'upper center': 9, 'center': 10}
at = AnchoredText(annotation,
codes[loc],
frameon=False,
borderpad=borderpad, | interval = axis.get_view_interval()
ticks_in_view_interval = []
for tick, loc in zip(axis.get_major_ticks(),
axis.get_major_locator()()):
if mtransforms.interval_contains(interval, loc):
ticks_in_view_interval.append(tick)
return ticks_in_view_interval
def set_figure_size_with_width(width):
params = {'figure.figsize': figure_size_from_width(width)}
plt.rcParams.update(params)
def figure_size_from_width(width):
"""Returns a single plot figure size in inches given a width in points"""
inches_per_point = 1.0/72.27
golden_mean = (math.sqrt(5)-1.0)/2.0
inches_width = width * inches_per_point
fig_height = inches_width*golden_mean
return [inches_width,fig_height] | prop=dict(linespacing=2.5))
axis.add_artist(at)
def get_major_ticks_within_view_interval(axis): |
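figure_size_from_width encodes two conventions: TeX points convert to inches at 1/72.27, and the height is the width scaled by the golden mean. A quick worked example, assuming a typical LaTeX \textwidth of 345 pt (substitute your own):

import math

width_pt = 345                            # assumed \textwidth in points
inches = width_pt / 72.27                 # ~4.77 in
height = inches * (math.sqrt(5) - 1) / 2  # golden mean ~0.618 -> ~2.95 in
print(round(inches, 2), round(height, 2))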
create-vault.component.ts | import { Component, OnInit } from '@angular/core';
import { FormBuilder, Validators } from '@angular/forms';
import { Router } from '@angular/router';
import { ToastrService } from 'ngx-toastr';
import CustomValidators from 'src/app/core/utils/customValidators';
import { VaultService } from 'src/app/core/services/vault/vault.service';
@Component({
selector: 'app-create-vault',
templateUrl: './create-vault.component.html',
styleUrls: ['./create-vault.component.scss']
})
export class CreateVaultComponent implements OnInit {
public createVaultForm;
constructor(
private fb: FormBuilder,
private router: Router,
private toastr: ToastrService,
private vaultService: VaultService) { }
ngOnInit() {
this.createVaultForm = this.fb.group({
password: ['', [Validators.required, Validators.minLength(18)]],
confirmPassword: ['', [Validators.required]]
}, { validator: CustomValidators.passwordsDoMatch.bind(this)});
}
createVault() {
if (this.createVaultForm.invalid) { | .createNewVault(this.password.value)
.subscribe(() => {
this.toastr.success('New vault created');
this.router.navigate(['home']);
});
}
get password () {
return this.createVaultForm.get('password');
}
get confirmPassword() {
return this.createVaultForm.get('confirmPassword');
}
} | return;
}
this.vaultService |
hotel_property_response.py | # coding: utf-8
"""
Amadeus Travel Innovation Sandbox
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: 1.2
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class HotelPropertyResponse(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'property_code': 'str',
'property_name': 'str',
'location': 'Geolocation',
'address': 'Address',
'total_price': 'Amount',
'min_daily_rate': 'Amount',
'contacts': 'list[Contact]',
'amenities': 'list[Amenity]',
'awards': 'list[Award]',
'images': 'list[Image]',
'rooms': 'list[HotelRoom]',
'more_rooms_at_this_hotel': 'Link'
}
attribute_map = {
'property_code': 'property_code',
'property_name': 'property_name',
'location': 'location',
'address': 'address',
'total_price': 'total_price',
'min_daily_rate': 'min_daily_rate',
'contacts': 'contacts',
'amenities': 'amenities',
'awards': 'awards',
'images': 'images',
'rooms': 'rooms',
'more_rooms_at_this_hotel': 'more_rooms_at_this_hotel'
}
def __init__(self, property_code=None, property_name=None, location=None, address=None, total_price=None, min_daily_rate=None, contacts=None, amenities=None, awards=None, images=None, rooms=None, more_rooms_at_this_hotel=None):
"""
HotelPropertyResponse - a model defined in Swagger
"""
self._property_code = None
self._property_name = None
self._location = None
self._address = None
self._total_price = None
self._min_daily_rate = None
self._contacts = None
self._amenities = None
self._awards = None
self._images = None
self._rooms = None
self._more_rooms_at_this_hotel = None
self.property_code = property_code
self.property_name = property_name
self.location = location
if address is not None:
self.address = address
self.total_price = total_price
self.min_daily_rate = min_daily_rate
if contacts is not None:
self.contacts = contacts
if amenities is not None:
self.amenities = amenities
if awards is not None:
self.awards = awards
if images is not None:
self.images = images
if rooms is not None:
self.rooms = rooms
if more_rooms_at_this_hotel is not None:
self.more_rooms_at_this_hotel = more_rooms_at_this_hotel
@property
def property_code(self):
"""
Gets the property_code of this HotelPropertyResponse.
The 8 character property code of this given hotel. The first 2 characters of this code are the chain code that can be specified in the input. The remaining elements are proprietary to each hotel chain.
:return: The property_code of this HotelPropertyResponse.
:rtype: str
"""
return self._property_code
@property_code.setter
def property_code(self, property_code):
"""
Sets the property_code of this HotelPropertyResponse.
The 8 character property code of this given hotel. The first 2 characters of this code are the chain code that can be specified in the input. The remaining elements are proprietary to each hotel chain.
:param property_code: The property_code of this HotelPropertyResponse.
:type: str
"""
if property_code is None:
raise ValueError("Invalid value for `property_code`, must not be `None`")
self._property_code = property_code
@property
def property_name(self):
"""
Gets the property_name of this HotelPropertyResponse.
The name of this hotel.
:return: The property_name of this HotelPropertyResponse.
:rtype: str
"""
return self._property_name
@property_name.setter
def property_name(self, property_name):
"""
Sets the property_name of this HotelPropertyResponse.
The name of this hotel.
:param property_name: The property_name of this HotelPropertyResponse.
:type: str
"""
if property_name is None:
raise ValueError("Invalid value for `property_name`, must not be `None`")
self._property_name = property_name
@property
def location(self):
"""
Gets the location of this HotelPropertyResponse.
:return: The location of this HotelPropertyResponse.
:rtype: Geolocation
"""
return self._location
@location.setter
def location(self, location):
"""
Sets the location of this HotelPropertyResponse.
:param location: The location of this HotelPropertyResponse.
:type: Geolocation
"""
if location is None:
raise ValueError("Invalid value for `location`, must not be `None`")
self._location = location
@property
def address(self):
"""
Gets the address of this HotelPropertyResponse.
:return: The address of this HotelPropertyResponse.
:rtype: Address
"""
return self._address
@address.setter
def address(self, address):
"""
Sets the address of this HotelPropertyResponse.
:param address: The address of this HotelPropertyResponse.
:type: Address
"""
self._address = address
@property
def total_price(self):
"""
Gets the total_price of this HotelPropertyResponse.
The lowest price of a stay, from the given check in date to the given check out date.
:return: The total_price of this HotelPropertyResponse.
:rtype: Amount
"""
return self._total_price
@total_price.setter
def total_price(self, total_price):
"""
Sets the total_price of this HotelPropertyResponse.
The lowest price of a stay, from the given check in date to the given check out date.
:param total_price: The total_price of this HotelPropertyResponse.
:type: Amount
"""
if total_price is None:
raise ValueError("Invalid value for `total_price`, must not be `None`")
self._total_price = total_price
@property
def min_daily_rate(self):
"""
Gets the min_daily_rate of this HotelPropertyResponse.
The lowest price per day that the hotel offers between the given check-in and check-out dates. Extra taxes may apply to this rate.
:return: The min_daily_rate of this HotelPropertyResponse.
:rtype: Amount
"""
return self._min_daily_rate
@min_daily_rate.setter
def min_daily_rate(self, min_daily_rate):
"""
Sets the min_daily_rate of this HotelPropertyResponse.
The lowest price per day that the hotel offers between the given check-in and check-out dates. Extra taxes may apply to this rate.
:param min_daily_rate: The min_daily_rate of this HotelPropertyResponse.
:type: Amount
"""
if min_daily_rate is None:
raise ValueError("Invalid value for `min_daily_rate`, must not be `None`")
self._min_daily_rate = min_daily_rate
@property
def contacts(self):
"""
Gets the contacts of this HotelPropertyResponse.
        An array of contact objects to tell the user how to contact the hotel. Typically includes a phone and fax number.
:return: The contacts of this HotelPropertyResponse.
:rtype: list[Contact]
"""
return self._contacts
@contacts.setter
def contacts(self, contacts):
"""
Sets the contacts of this HotelPropertyResponse.
        An array of contact objects to tell the user how to contact the hotel. Typically includes a phone and fax number.
:param contacts: The contacts of this HotelPropertyResponse.
:type: list[Contact]
"""
self._contacts = contacts
@property
def amenities(self):
"""
Gets the amenities of this HotelPropertyResponse.
        An array of amenity objects to tell the user what facilities this hotel might provide, such as a pool or parking. If this array is empty, it does not necessarily mean that there are no amenities available at this hotel; it could also mean that the hotel does not list their amenities in our search!
:return: The amenities of this HotelPropertyResponse.
:rtype: list[Amenity]
"""
return self._amenities
@amenities.setter
def amenities(self, amenities):
"""
Sets the amenities of this HotelPropertyResponse.
        An array of amenity objects to tell the user what facilities this hotel might provide, such as a pool or parking. If this array is empty, it does not necessarily mean that there are no amenities available at this hotel; it could also mean that the hotel does not list their amenities in our search!
:param amenities: The amenities of this HotelPropertyResponse.
:type: list[Amenity]
"""
self._amenities = amenities
@property
def awards(self):
"""
Gets the awards of this HotelPropertyResponse.
An array of hotel award objects to give the user an expectation of the service quality at this hotel. This can be used to indicate, for example, the star rating of a hotel. If this array is empty, it does not necessarily mean that the hotel has no awards, it could simply mean that they didn't tell us about them!
:return: The awards of this HotelPropertyResponse.
:rtype: list[Award]
"""
return self._awards
@awards.setter
def awards(self, awards):
"""
Sets the awards of this HotelPropertyResponse.
An array of hotel award objects to give the user an expectation of the service quality at this hotel. This can be used to indicate, for example, the star rating of a hotel. If this array is empty, it does not necessarily mean that the hotel has no awards, it could simply mean that they didn't tell us about them!
:param awards: The awards of this HotelPropertyResponse.
:type: list[Award]
"""
self._awards = awards
@property
def images(self):
"""
Gets the images of this HotelPropertyResponse.
A selection of image objects, showing pictures of the hotel building, the entrance or some rooms, to give an indication of what to expect at this hotel. Note that redistribution of images outside Amadeus products requires licensing from our image providers: Leonardo and Ice Portal. Thus image links are returned for whitelisted Amadeus users only.
:return: The images of this HotelPropertyResponse.
:rtype: list[Image]
"""
return self._images
@images.setter
def images(self, images):
"""
Sets the images of this HotelPropertyResponse.
A selection of image objects, showing pictures of the hotel building, the entrance or some rooms, to give an indication of what to expect at this hotel. Note that redistribution of images outside Amadeus products requires licensing from our image providers: Leonardo and Ice Portal. Thus image links are returned for whitelisted Amadeus users only.
:param images: The images of this HotelPropertyResponse.
:type: list[Image]
"""
self._images = images
@property
def | (self):
"""
Gets the rooms of this HotelPropertyResponse.
Information on currently available rooms at this hotel.
:return: The rooms of this HotelPropertyResponse.
:rtype: list[HotelRoom]
"""
return self._rooms
@rooms.setter
def rooms(self, rooms):
"""
Sets the rooms of this HotelPropertyResponse.
Information on currently available rooms at this hotel.
:param rooms: The rooms of this HotelPropertyResponse.
:type: list[HotelRoom]
"""
self._rooms = rooms
@property
def more_rooms_at_this_hotel(self):
"""
Gets the more_rooms_at_this_hotel of this HotelPropertyResponse.
Provides a ready-to-use link to make a follow up request to find more available rooms at this hotel
:return: The more_rooms_at_this_hotel of this HotelPropertyResponse.
:rtype: Link
"""
return self._more_rooms_at_this_hotel
@more_rooms_at_this_hotel.setter
def more_rooms_at_this_hotel(self, more_rooms_at_this_hotel):
"""
Sets the more_rooms_at_this_hotel of this HotelPropertyResponse.
Provides a ready-to-use link to make a follow up request to find more available rooms at this hotel
:param more_rooms_at_this_hotel: The more_rooms_at_this_hotel of this HotelPropertyResponse.
:type: Link
"""
self._more_rooms_at_this_hotel = more_rooms_at_this_hotel
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, HotelPropertyResponse):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
| rooms |
delete.ts | import { Command } from '@oclif/command'
import { findTreeById, deleteTreeById } from '@/api/file'
import ora from 'ora'
export default class | extends Command {
static description = 'Delete file from cloudreve.'
static args = [{
name: 'remoteFile',
required: true,
}]
async run(): Promise<void> {
const { args } = this.parse(FileDeleteCommand)
const { remoteFile } = args
const spinner = ora('Deleting tree').start()
const tree = await findTreeById(remoteFile)
if (tree !== null) {
await deleteTreeById(tree)
spinner.succeed('Delete success')
return
}
spinner.warn('File not found')
}
}
| FileDeleteCommand |
SubscriptionDefinitions_Get.js | /**
* Auto-generated action file for "SubscriptionDefinitionsClient" API.
*
* Generated at: 2019-05-07T14:39:19.717Z
* Mass generator version: 1.1.0
*
* flowground :- Telekom iPaaS / azure-com-subscription-subscription-definitions-connector
* Copyright © 2019, Deutsche Telekom AG
* contact: [email protected]
*
* All files of this connector are licensed under the Apache 2.0 License. For details
* see the file LICENSE on the toplevel directory.
*
*
* Operation: 'SubscriptionDefinitions_Get'
* Endpoint Path: '/providers/Microsoft.Subscription/subscriptionDefinitions/{subscriptionDefinitionName}'
* Method: 'get'
*
*/
const Swagger = require('swagger-client');
const processWrapper = require('../services/process-wrapper');
const spec = require('../spec.json');
// this wrapper offers a simplified emitData(data) function
module.exports.process = processWrapper(processAction);
// parameter names for this call
const PARAMETERS = [
"subscriptionDefinitionName",
"api-version"
];
// mappings from connector field names to API field names
const FIELD_MAP = {
"subscriptionDefinitionName": "subscriptionDefinitionName",
"api_version": "api-version"
};
function processAction(msg, cfg) {
var isVerbose = process.env.debug || cfg.verbose;
if (isVerbose) {
console.log(`---MSG: ${JSON.stringify(msg)}`);
console.log(`---CFG: ${JSON.stringify(cfg)}`);
console.log(`---ENV: ${JSON.stringify(process.env)}`);
}
const contentType = undefined;
const body = msg.body;
mapFieldNames(body);
let parameters = {};
for(let param of PARAMETERS) {
parameters[param] = body[param];
}
// credentials for this operation
let securities = {};
securities['azure_auth'] = {token: cfg['azure_auth']};
let callParams = {
spec: spec,
operationId: 'SubscriptionDefinitions_Get',
pathName: '/providers/Microsoft.Subscription/subscriptionDefinitions/{subscriptionDefinitionName}',
method: 'get',
parameters: parameters,
requestContentType: contentType,
requestBody: body.requestBody,
securities: {authorized: securities},
server: spec.servers[cfg.server] || cfg.otherServer,
};
if (isVerbose) {
let out = Object.assign({}, callParams);
out.spec = '[omitted]';
console.log(`--SWAGGER CALL: ${JSON.stringify(out)}`);
}
// Call operation via Swagger client
return Swagger.execute(callParams).then(data => {
// emit a single message with data
this.emitData(data);
// if the response contains an array of entities, you can emit them one by one:
// data.obj.someItems.forEach((item) => {
// this.emitData(item);
// }
});
}
function m | obj) {
if(Array.isArray(obj)) {
obj.forEach(mapFieldNames);
}
else if(typeof obj === 'object' && obj) {
Object.keys(obj).forEach(key => {
mapFieldNames(obj[key]);
let goodKey = FIELD_MAP[key];
if(goodKey && goodKey !== key) {
obj[goodKey] = obj[key];
delete obj[key];
}
});
}
} | apFieldNames( |
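mapFieldNames walks the payload depth-first and renames connector-safe keys (api_version) back to their wire names (api-version) in place. The same traversal in Python, for reference:

FIELD_MAP = {"api_version": "api-version"}

def map_field_names(obj):
    # Depth-first, in-place rename; lists recurse into their items.
    if isinstance(obj, list):
        for item in obj:
            map_field_names(item)
    elif isinstance(obj, dict):
        for key in list(obj):
            map_field_names(obj[key])
            good = FIELD_MAP.get(key)
            if good and good != key:
                obj[good] = obj.pop(key)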
keys_client.go | // Code generated by go-swagger; DO NOT EDIT.
package keys
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command
import (
"github.com/go-openapi/runtime"
strfmt "github.com/go-openapi/strfmt"
)
// New creates a new keys API client.
func New(transport runtime.ClientTransport, formats strfmt.Registry) *Client {
return &Client{transport: transport, formats: formats}
}
/*
Client for keys API
*/
type Client struct {
transport runtime.ClientTransport
formats strfmt.Registry
}
/*
WeaviateKeyCreate creates a new key related to this key
Creates a new key. Input expiration date is validated on being in the future and not longer than parent expiration date.
*/
func (a *Client) WeaviateKeyCreate(params *WeaviateKeyCreateParams, authInfo runtime.ClientAuthInfoWriter) (*WeaviateKeyCreateOK, error) {
// TODO: Validate the params before sending
if params == nil {
params = NewWeaviateKeyCreateParams()
}
result, err := a.transport.Submit(&runtime.ClientOperation{
ID: "weaviate.key.create",
Method: "POST",
PathPattern: "/keys",
ProducesMediaTypes: []string{"application/json"},
ConsumesMediaTypes: []string{"application/json"},
Schemes: []string{"https"},
Params: params,
Reader: &WeaviateKeyCreateReader{formats: a.formats},
AuthInfo: authInfo,
Context: params.Context,
Client: params.HTTPClient,
})
if err != nil {
return nil, err
}
return result.(*WeaviateKeyCreateOK), nil
}
/*
WeaviateKeysChildrenGet gets an object of this keys children related to this key
Get children of a key, only one step deep. A child can have children of its own.
*/
func (a *Client) WeaviateKeysChildrenGet(params *WeaviateKeysChildrenGetParams, authInfo runtime.ClientAuthInfoWriter) (*WeaviateKeysChildrenGetOK, error) {
// TODO: Validate the params before sending
if params == nil {
params = NewWeaviateKeysChildrenGetParams()
}
result, err := a.transport.Submit(&runtime.ClientOperation{
ID: "weaviate.keys.children.get",
Method: "GET",
PathPattern: "/keys/{keyId}/children",
ProducesMediaTypes: []string{"application/json"},
ConsumesMediaTypes: []string{"application/json"},
Schemes: []string{"https"},
Params: params,
Reader: &WeaviateKeysChildrenGetReader{formats: a.formats},
AuthInfo: authInfo,
Context: params.Context,
Client: params.HTTPClient,
})
if err != nil |
return result.(*WeaviateKeysChildrenGetOK), nil
}
/*
WeaviateKeysDelete deletes a key based on its uuid related to this key
Deletes a key. Only parent or self is allowed to delete key. When you delete a key, all its children will be deleted as well.
*/
func (a *Client) WeaviateKeysDelete(params *WeaviateKeysDeleteParams, authInfo runtime.ClientAuthInfoWriter) (*WeaviateKeysDeleteNoContent, error) {
// TODO: Validate the params before sending
if params == nil {
params = NewWeaviateKeysDeleteParams()
}
result, err := a.transport.Submit(&runtime.ClientOperation{
ID: "weaviate.keys.delete",
Method: "DELETE",
PathPattern: "/keys/{keyId}",
ProducesMediaTypes: []string{"application/json"},
ConsumesMediaTypes: []string{"application/json"},
Schemes: []string{"https"},
Params: params,
Reader: &WeaviateKeysDeleteReader{formats: a.formats},
AuthInfo: authInfo,
Context: params.Context,
Client: params.HTTPClient,
})
if err != nil {
return nil, err
}
return result.(*WeaviateKeysDeleteNoContent), nil
}
/*
WeaviateKeysGet gets a key based on its uuid related to this key
Get a key.
*/
func (a *Client) WeaviateKeysGet(params *WeaviateKeysGetParams, authInfo runtime.ClientAuthInfoWriter) (*WeaviateKeysGetOK, error) {
// TODO: Validate the params before sending
if params == nil {
params = NewWeaviateKeysGetParams()
}
result, err := a.transport.Submit(&runtime.ClientOperation{
ID: "weaviate.keys.get",
Method: "GET",
PathPattern: "/keys/{keyId}",
ProducesMediaTypes: []string{"application/json"},
ConsumesMediaTypes: []string{"application/json"},
Schemes: []string{"https"},
Params: params,
Reader: &WeaviateKeysGetReader{formats: a.formats},
AuthInfo: authInfo,
Context: params.Context,
Client: params.HTTPClient,
})
if err != nil {
return nil, err
}
return result.(*WeaviateKeysGetOK), nil
}
/*
WeaviateKeysMeChildrenGet gets an object of this keys children related to the key used for request
Get children of used key, only one step deep. A child can have children of its own.
*/
func (a *Client) WeaviateKeysMeChildrenGet(params *WeaviateKeysMeChildrenGetParams, authInfo runtime.ClientAuthInfoWriter) (*WeaviateKeysMeChildrenGetOK, error) {
// TODO: Validate the params before sending
if params == nil {
params = NewWeaviateKeysMeChildrenGetParams()
}
result, err := a.transport.Submit(&runtime.ClientOperation{
ID: "weaviate.keys.me.children.get",
Method: "GET",
PathPattern: "/keys/me/children",
ProducesMediaTypes: []string{"application/json"},
ConsumesMediaTypes: []string{"application/json"},
Schemes: []string{"https"},
Params: params,
Reader: &WeaviateKeysMeChildrenGetReader{formats: a.formats},
AuthInfo: authInfo,
Context: params.Context,
Client: params.HTTPClient,
})
if err != nil {
return nil, err
}
return result.(*WeaviateKeysMeChildrenGetOK), nil
}
/*
WeaviateKeysMeGet gets a key based on the key used to do the request
Get the key-information of the key used.
*/
func (a *Client) WeaviateKeysMeGet(params *WeaviateKeysMeGetParams, authInfo runtime.ClientAuthInfoWriter) (*WeaviateKeysMeGetOK, error) {
// TODO: Validate the params before sending
if params == nil {
params = NewWeaviateKeysMeGetParams()
}
result, err := a.transport.Submit(&runtime.ClientOperation{
ID: "weaviate.keys.me.get",
Method: "GET",
PathPattern: "/keys/me",
ProducesMediaTypes: []string{"application/json"},
ConsumesMediaTypes: []string{"application/json"},
Schemes: []string{"https"},
Params: params,
Reader: &WeaviateKeysMeGetReader{formats: a.formats},
AuthInfo: authInfo,
Context: params.Context,
Client: params.HTTPClient,
})
if err != nil {
return nil, err
}
return result.(*WeaviateKeysMeGetOK), nil
}
/*
WeaviateKeysRenewToken renews a key based on the key given in the query string
Renews the related key. Validates being lower in tree than given key. Can not renew itself, unless being parent.
*/
func (a *Client) WeaviateKeysRenewToken(params *WeaviateKeysRenewTokenParams, authInfo runtime.ClientAuthInfoWriter) (*WeaviateKeysRenewTokenOK, error) {
// TODO: Validate the params before sending
if params == nil {
params = NewWeaviateKeysRenewTokenParams()
}
result, err := a.transport.Submit(&runtime.ClientOperation{
ID: "weaviate.keys.renew.token",
Method: "PUT",
PathPattern: "/keys/{keyId}/renew-token",
ProducesMediaTypes: []string{"application/json"},
ConsumesMediaTypes: []string{"application/json"},
Schemes: []string{"https"},
Params: params,
Reader: &WeaviateKeysRenewTokenReader{formats: a.formats},
AuthInfo: authInfo,
Context: params.Context,
Client: params.HTTPClient,
})
if err != nil {
return nil, err
}
return result.(*WeaviateKeysRenewTokenOK), nil
}
// SetTransport changes the transport on the client
func (a *Client) SetTransport(transport runtime.ClientTransport) {
a.transport = transport
}
| {
return nil, err
} |
storyboard.py |
#############################################################################
# Classes related to the CyTrONE storyboard
#############################################################################
class | :
# Global configuration flags
ENABLE_HTTPS = True
ENABLE_PASSWORD = True
# Separator constants
SEPARATOR1 = "-------------------------------------------------------------------------"
SEPARATOR2 = "========================================================================="
SEPARATOR3 = "#########################################################################"
# Server status keys
SERVER_STATUS_KEY = "status"
SERVER_STATUS_SUCCESS = "SUCCESS"
SERVER_STATUS_ERROR = "ERROR"
SERVER_ACTIVITY_ID_KEY = "activity_id"
SERVER_MESSAGE_KEY = "message"
# Server status messages
USER_SETTINGS_LOADING_ERROR = "Server could not load the user information database"
USER_ID_MISSING_ERROR = "User id is missing"
USER_ID_INVALID_ERROR = "User id is invalid"
USER_PASSWORD_MISSING_ERROR = "User password is missing"
USER_PASSWORD_NOT_IN_DATABASE_ERROR = "User password not in database"
USER_ID_PASSWORD_INVALID_ERROR = "User id and/or password are invalid"
ACTION_MISSING_ERROR = "Action is missing"
ACTION_INVALID_ERROR = "Action is invalid"
LANGUAGE_MISSING_ERROR = "Language is missing"
LANGUAGE_INVALID_ERROR = "Language is invalid"
TRAINING_SETTINGS_LOADING_ERROR = "Server could not load the training settings database"
INSTANCE_COUNT_MISSING_ERROR = "Instance count is missing"
INSTANCE_COUNT_INVALID_ERROR = "Instance count is invalid"
TRAINING_TYPE_MISSING_ERROR = "Training type is invalid or missing"
SCENARIO_NAME_MISSING_ERROR = "Scenario name is missing"
LEVEL_NAME_MISSING_ERROR = "Level name is missing"
SESSION_ALLOCATION_ERROR = "Server could not allocate a new session (maximum number reached)"
CONTENT_IDENTIFICATION_ERROR = "Server could not determine the training content for the specified scenario and level"
CONTENT_LOADING_ERROR = "Server could not load the training content"
CONTENT_UPLOAD_ERROR = "LMS content manager could not upload the training content"
CONTENT_REMOVAL_ERROR = "LMS content manager could not remove the training activity"
CONTENT_SERVER_ERROR = "Server could not communicate with the LMS content manager"
TEMPLATE_IDENTIFICATION_ERROR = "Server could not determine the cyber range template for the specified scenario and level"
TEMPLATE_LOADING_ERROR = "Server could not load the cyber range template"
INSTANTIATION_SERVER_ERROR = "Server could not communicate with the cyber range manager"
INSTANTIATION_ERROR = "Cyber range manager could not instantiate the cyber range"
INSTANTIATION_STATUS_FILE_NOT_FOUND = "Instantiation status file could not be found"
INSTANTIATION_CYRIS_IO_ERROR = "CyRIS execution I/O error"
INSTANTIATION_SIMULATED_ERROR = "Simulated range instantiation error"
DESTRUCTION_ERROR = "Cyber range manager could not destroy the cyber range"
DESTRUCTION_SIMULATED_ERROR = "Simulated range destruction error"
DESTRUCTION_SCRIPT_NOT_FOUND = "Destruction script could not be found"
SESSION_ID_MISSING_ERROR = "Session id is missing"
SESSION_ID_INVALID_ERROR = "Session id is invalid"
SESSION_INFO_CONSISTENCY_ERROR = "Server encountered a session information consistency issue"
| Storyboard |
Love_vs_friendship.go | package kata
func WordsToMarks(s string) (sum int) | {
for _,v := range s { sum += int(v) }
sum -= len(s) * (int('a') - 1)
return
} |
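The subtraction trick works because sum(c - 'a' + 1) over the letters equals sum(c) minus len(s) * ('a' - 1), assuming all-lowercase input. A quick numeric check in Python using the kata's classic example:

s = "attitude"  # the well-known 100-point word
naive = sum(ord(c) - ord("a") + 1 for c in s)
tricky = sum(map(ord, s)) - len(s) * (ord("a") - 1)
assert naive == tricky == 100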
|
jquery_test.js | describe("", function() {
var rootEl;
beforeEach(function() {
rootEl = browser.rootEl;
browser.get("build/docs/examples/example-example29/index-jquery.html");
});
it('should freeze binding after its value has stabilized', function() {
var oneTimeBiding = element(by.id('one-time-binding-example'));
var normalBinding = element(by.id('normal-binding-example'));
expect(oneTimeBiding.getText()).toEqual('One time binding:');
expect(normalBinding.getText()).toEqual('Normal binding:');
element(by.buttonText('Click Me')).click();
expect(oneTimeBiding.getText()).toEqual('One time binding: Igor');
expect(normalBinding.getText()).toEqual('Normal binding: Igor');
element(by.buttonText('Click Me')).click();
expect(oneTimeBiding.getText()).toEqual('One time binding: Igor');
expect(normalBinding.getText()).toEqual('Normal binding: Misko'); |
element(by.buttonText('Click Me')).click();
element(by.buttonText('Click Me')).click();
expect(oneTimeBiding.getText()).toEqual('One time binding: Igor');
expect(normalBinding.getText()).toEqual('Normal binding: Lucas');
});
}); | |
cpu_stats_handler.rs | // Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use crate::message::{Message, MessageReturn};
use crate::node::Node;
use async_trait::async_trait;
use failure::{format_err, Error};
use std::rc::Rc;
/// Node: CpuStatsHandler
///
/// Summary: WIP
///
/// Message Inputs: WIP
///
/// Message Outputs: WIP
///
/// FIDL: WIP
pub struct CpuStatsHandler;
impl CpuStatsHandler {
pub fn new() -> Rc<Self> {
Rc::new(Self)
}
}
#[async_trait(?Send)]
impl Node for CpuStatsHandler {
fn name(&self) -> &'static str {
"CpuStatsHandler"
}
async fn handle_message(&self, msg: &Message<'_>) -> Result<MessageReturn, Error> |
}
| {
match msg {
_ => Err(format_err!("Unsupported message: {:?}", msg)),
}
} |
generated.rs | // =================================================================
//
// * WARNING *
//
// This file is generated!
//
// Changes made to this file will be overwritten. If changes are
// required to the generated code, the service_crategen project
// must be updated to generate the changes.
//
// =================================================================
use std::error::Error;
use std::fmt;
use std::io;
#[allow(warnings)]
use futures::future;
use futures::Future;
use rusoto_core::region;
use rusoto_core::request::{BufferedHttpResponse, DispatchSignedRequest};
use rusoto_core::{Client, RusotoFuture};
use rusoto_core::credential::{CredentialsError, ProvideAwsCredentials};
use rusoto_core::request::HttpDispatchError;
use rusoto_core::signature::SignedRequest;
use serde_json;
use serde_json::from_slice;
use serde_json::Value as SerdeJsonValue;
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct AssociateDRTLogBucketRequest {
/// <p>The Amazon S3 bucket that contains your flow logs.</p>
#[serde(rename = "LogBucket")]
pub log_bucket: String,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct AssociateDRTLogBucketResponse {}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct AssociateDRTRoleRequest {
/// <p>The Amazon Resource Name (ARN) of the role the DRT will use to access your AWS account.</p> <p>Prior to making the <code>AssociateDRTRole</code> request, you must attach the <a href="https://console.aws.amazon.com/iam/home?#/policies/arn:aws:iam::aws:policy/service-role/AWSShieldDRTAccessPolicy">AWSShieldDRTAccessPolicy</a> managed policy to this role. For more information see <a href=" https://docs.aws.amazon.com/IAM/latest/UserGuide/access_policies_manage-attach-detach.html">Attaching and Detaching IAM Policies</a>.</p>
#[serde(rename = "RoleArn")]
pub role_arn: String,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct AssociateDRTRoleResponse {}
/// <p>The details of a DDoS attack.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct AttackDetail {
/// <p>List of counters that describe the attack for the specified time period.</p>
#[serde(rename = "AttackCounters")]
#[serde(skip_serializing_if = "Option::is_none")]
pub attack_counters: Option<Vec<SummarizedCounter>>,
/// <p>The unique identifier (ID) of the attack.</p>
#[serde(rename = "AttackId")]
#[serde(skip_serializing_if = "Option::is_none")]
pub attack_id: Option<String>,
/// <p>The array of <a>AttackProperty</a> objects.</p>
#[serde(rename = "AttackProperties")]
#[serde(skip_serializing_if = "Option::is_none")]
pub attack_properties: Option<Vec<AttackProperty>>,
/// <p>The time the attack ended, in Unix time in seconds. For more information see <a href="http://docs.aws.amazon.com/cli/latest/userguide/cli-using-param.html#parameter-types">timestamp</a>.</p>
#[serde(rename = "EndTime")]
#[serde(skip_serializing_if = "Option::is_none")]
pub end_time: Option<f64>,
/// <p>List of mitigation actions taken for the attack.</p>
#[serde(rename = "Mitigations")]
#[serde(skip_serializing_if = "Option::is_none")]
pub mitigations: Option<Vec<Mitigation>>,
/// <p>The ARN (Amazon Resource Name) of the resource that was attacked.</p>
#[serde(rename = "ResourceArn")]
#[serde(skip_serializing_if = "Option::is_none")]
pub resource_arn: Option<String>,
/// <p>The time the attack started, in Unix time in seconds. For more information see <a href="http://docs.aws.amazon.com/cli/latest/userguide/cli-using-param.html#parameter-types">timestamp</a>.</p>
#[serde(rename = "StartTime")]
#[serde(skip_serializing_if = "Option::is_none")]
pub start_time: Option<f64>,
/// <p>If applicable, additional detail about the resource being attacked, for example, IP address or URL.</p>
#[serde(rename = "SubResources")]
#[serde(skip_serializing_if = "Option::is_none")]
pub sub_resources: Option<Vec<SubResourceSummary>>,
}
/// <p>Details of the described attack.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct AttackProperty {
/// <p>The type of DDoS event that was observed. <code>NETWORK</code> indicates layer 3 and layer 4 events and <code>APPLICATION</code> indicates layer 7 events.</p>
#[serde(rename = "AttackLayer")]
#[serde(skip_serializing_if = "Option::is_none")]
pub attack_layer: Option<String>,
/// <p>Defines the DDoS attack property information that is provided.</p>
#[serde(rename = "AttackPropertyIdentifier")]
#[serde(skip_serializing_if = "Option::is_none")]
pub attack_property_identifier: Option<String>,
/// <p>The array of <a>Contributor</a> objects that includes the top five contributors to an attack. </p>
#[serde(rename = "TopContributors")]
#[serde(skip_serializing_if = "Option::is_none")]
pub top_contributors: Option<Vec<Contributor>>,
/// <p>The total contributions made to this attack by all contributors, not just the five listed in the <code>TopContributors</code> list.</p>
#[serde(rename = "Total")]
#[serde(skip_serializing_if = "Option::is_none")]
pub total: Option<i64>,
/// <p>The unit of the <code>Value</code> of the contributions.</p>
#[serde(rename = "Unit")]
#[serde(skip_serializing_if = "Option::is_none")]
pub unit: Option<String>,
}
/// <p>Summarizes all DDoS attacks for a specified time period.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct AttackSummary {
/// <p>The unique identifier (ID) of the attack.</p>
#[serde(rename = "AttackId")]
#[serde(skip_serializing_if = "Option::is_none")]
pub attack_id: Option<String>,
/// <p>The list of attacks for a specified time period.</p>
#[serde(rename = "AttackVectors")]
#[serde(skip_serializing_if = "Option::is_none")]
pub attack_vectors: Option<Vec<AttackVectorDescription>>,
/// <p>The end time of the attack, in Unix time in seconds. For more information see <a href="http://docs.aws.amazon.com/cli/latest/userguide/cli-using-param.html#parameter-types">timestamp</a>.</p>
#[serde(rename = "EndTime")]
#[serde(skip_serializing_if = "Option::is_none")]
pub end_time: Option<f64>,
/// <p>The ARN (Amazon Resource Name) of the resource that was attacked.</p>
#[serde(rename = "ResourceArn")]
#[serde(skip_serializing_if = "Option::is_none")]
pub resource_arn: Option<String>,
/// <p>The start time of the attack, in Unix time in seconds. For more information see <a href="http://docs.aws.amazon.com/cli/latest/userguide/cli-using-param.html#parameter-types">timestamp</a>.</p>
#[serde(rename = "StartTime")]
#[serde(skip_serializing_if = "Option::is_none")]
pub start_time: Option<f64>,
}
/// <p>Describes the attack.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct AttackVectorDescription {
    /// <p><p>The attack type. Valid values:</p> <ul> <li> <p>UDP_TRAFFIC</p> </li> <li> <p>UDP_FRAGMENT</p> </li> <li> <p>GENERIC_UDP_REFLECTION</p> </li> <li> <p>DNS_REFLECTION</p> </li> <li> <p>NTP_REFLECTION</p> </li> <li> <p>CHARGEN_REFLECTION</p> </li> <li> <p>SSDP_REFLECTION</p> </li> <li> <p>PORT_MAPPER</p> </li> <li> <p>RIP_REFLECTION</p> </li> <li> <p>SNMP_REFLECTION</p> </li> <li> <p>MSSQL_REFLECTION</p> </li> <li> <p>NET_BIOS_REFLECTION</p> </li> <li> <p>SYN_FLOOD</p> </li> <li> <p>ACK_FLOOD</p> </li> <li> <p>REQUEST_FLOOD</p> </li> </ul></p>
#[serde(rename = "VectorType")]
pub vector_type: String,
}
/// <p>A contributor to the attack and their contribution.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct Contributor {
/// <p>The name of the contributor. This is dependent on the <code>AttackPropertyIdentifier</code>. For example, if the <code>AttackPropertyIdentifier</code> is <code>SOURCE_COUNTRY</code>, the <code>Name</code> could be <code>United States</code>.</p>
#[serde(rename = "Name")]
#[serde(skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
/// <p>The contribution of this contributor expressed in <a>Protection</a> units. For example <code>10,000</code>.</p>
#[serde(rename = "Value")]
#[serde(skip_serializing_if = "Option::is_none")]
pub value: Option<i64>,
}
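/// <p>Usage sketch (editor's addition, not AWS documentation): create a protection for a
/// CloudFront distribution. It assumes the rusoto 0.3x-era <code>Shield</code> trait,
/// <code>ShieldClient::new(Region)</code>, and the blocking <code>sync()</code> call; the
/// name and ARN below are placeholders.</p>
///
/// ```no_run
/// use rusoto_core::Region;
/// use rusoto_shield::{CreateProtectionRequest, Shield, ShieldClient};
///
/// let client = ShieldClient::new(Region::UsEast1);
/// let request = CreateProtectionRequest {
///     name: "cloudfront-protection".to_string(),
///     // Hypothetical ARN, for illustration only.
///     resource_arn: "arn:aws:cloudfront::123456789012:distribution/EXAMPLE".to_string(),
/// };
/// match client.create_protection(request).sync() {
///     Ok(response) => println!("protection id: {:?}", response.protection_id),
///     Err(err) => eprintln!("create_protection failed: {}", err),
/// }
/// ```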
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct CreateProtectionRequest {
/// <p>Friendly name for the <code>Protection</code> you are creating.</p>
#[serde(rename = "Name")]
pub name: String,
/// <p><p>The ARN (Amazon Resource Name) of the resource to be protected.</p> <p>The ARN should be in one of the following formats:</p> <ul> <li> <p>For an Application Load Balancer: <code>arn:aws:elasticloadbalancing:<i>region</i>:<i>account-id</i>:loadbalancer/app/<i>load-balancer-name</i>/<i>load-balancer-id</i> </code> </p> </li> <li> <p>For an Elastic Load Balancer (Classic Load Balancer): <code>arn:aws:elasticloadbalancing:<i>region</i>:<i>account-id</i>:loadbalancer/<i>load-balancer-name</i> </code> </p> </li> <li> <p>For AWS CloudFront distribution: <code>arn:aws:cloudfront::<i>account-id</i>:distribution/<i>distribution-id</i> </code> </p> </li> <li> <p>For Amazon Route 53: <code>arn:aws:route53::<i>account-id</i>:hostedzone/<i>hosted-zone-id</i> </code> </p> </li> <li> <p>For an Elastic IP address: <code>arn:aws:ec2:<i>region</i>:<i>account-id</i>:eip-allocation/<i>allocation-id</i> </code> </p> </li> </ul></p>
#[serde(rename = "ResourceArn")]
pub resource_arn: String,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct CreateProtectionResponse {
/// <p>The unique identifier (ID) for the <a>Protection</a> object that is created.</p>
#[serde(rename = "ProtectionId")]
#[serde(skip_serializing_if = "Option::is_none")]
pub protection_id: Option<String>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct CreateSubscriptionRequest {}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct CreateSubscriptionResponse {}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct DeleteProtectionRequest {
/// <p>The unique identifier (ID) for the <a>Protection</a> object to be deleted.</p>
#[serde(rename = "ProtectionId")]
pub protection_id: String,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct DeleteProtectionResponse {}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct DeleteSubscriptionRequest {}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct DeleteSubscriptionResponse {}
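/// <p>Usage sketch (editor's addition, not AWS documentation): look up the details of a
/// single attack by its ID, under the same rusoto 0.3x-era assumptions as the other
/// examples in this module. The attack ID is a placeholder.</p>
///
/// ```no_run
/// use rusoto_core::Region;
/// use rusoto_shield::{DescribeAttackRequest, Shield, ShieldClient};
///
/// let client = ShieldClient::new(Region::UsEast1);
/// let request = DescribeAttackRequest {
///     // Hypothetical attack ID, e.g. taken from a ListAttacks response.
///     attack_id: "a1b2c3d4-5678-90ab-cdef-EXAMPLE".to_string(),
/// };
/// match client.describe_attack(request).sync() {
///     Ok(response) => println!("attack detail: {:?}", response.attack),
///     Err(err) => eprintln!("describe_attack failed: {}", err),
/// }
/// ```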
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct DescribeAttackRequest {
    /// <p>The unique identifier (ID) for the attack to be described.</p>
#[serde(rename = "AttackId")]
pub attack_id: String,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct DescribeAttackResponse {
/// <p>The attack that is described.</p>
#[serde(rename = "Attack")]
#[serde(skip_serializing_if = "Option::is_none")]
pub attack: Option<AttackDetail>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct DescribeDRTAccessRequest {}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct DescribeDRTAccessResponse {
/// <p>The list of Amazon S3 buckets accessed by the DRT.</p>
#[serde(rename = "LogBucketList")]
#[serde(skip_serializing_if = "Option::is_none")]
pub log_bucket_list: Option<Vec<String>>,
/// <p>The Amazon Resource Name (ARN) of the role the DRT used to access your AWS account.</p>
#[serde(rename = "RoleArn")]
#[serde(skip_serializing_if = "Option::is_none")]
pub role_arn: Option<String>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct DescribeEmergencyContactSettingsRequest {}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct DescribeEmergencyContactSettingsResponse {
/// <p>A list of email addresses that the DRT can use to contact you during a suspected attack.</p>
#[serde(rename = "EmergencyContactList")]
#[serde(skip_serializing_if = "Option::is_none")]
pub emergency_contact_list: Option<Vec<EmergencyContact>>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct DescribeProtectionRequest {
/// <p>The unique identifier (ID) for the <a>Protection</a> object that is described.</p>
#[serde(rename = "ProtectionId")]
pub protection_id: String,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct DescribeProtectionResponse {
/// <p>The <a>Protection</a> object that is described.</p>
#[serde(rename = "Protection")]
#[serde(skip_serializing_if = "Option::is_none")]
pub protection: Option<Protection>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct DescribeSubscriptionRequest {}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct DescribeSubscriptionResponse {
/// <p>The AWS Shield Advanced subscription details for an account.</p>
#[serde(rename = "Subscription")]
#[serde(skip_serializing_if = "Option::is_none")]
pub subscription: Option<Subscription>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct DisassociateDRTLogBucketRequest {
/// <p>The Amazon S3 bucket that contains your flow logs.</p>
#[serde(rename = "LogBucket")]
pub log_bucket: String,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct DisassociateDRTLogBucketResponse {}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct DisassociateDRTRoleRequest {}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct DisassociateDRTRoleResponse {}
/// <p>Contact information that the DRT can use to contact you during a suspected attack.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct EmergencyContact {
/// <p>An email address that the DRT can use to contact you during a suspected attack.</p>
#[serde(rename = "EmailAddress")]
pub email_address: String,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct GetSubscriptionStateRequest {}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct GetSubscriptionStateResponse {
/// <p>The status of the subscription.</p>
#[serde(rename = "SubscriptionState")]
pub subscription_state: String,
}
/// <p>Specifies how many protections of a given type you can create.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct Limit {
/// <p>The maximum number of protections that can be created for the specified <code>Type</code>.</p>
#[serde(rename = "Max")]
#[serde(skip_serializing_if = "Option::is_none")]
pub max: Option<i64>,
/// <p>The type of protection.</p>
#[serde(rename = "Type")]
#[serde(skip_serializing_if = "Option::is_none")]
pub type_: Option<String>,
}
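/// <p>Usage sketch (editor's addition, not AWS documentation): list attacks that started
/// after a given Unix timestamp, under the same rusoto 0.3x-era assumptions as the other
/// examples in this module. The timestamp is a placeholder.</p>
///
/// ```no_run
/// use rusoto_core::Region;
/// use rusoto_shield::{ListAttacksRequest, Shield, ShieldClient, TimeRange};
///
/// let client = ShieldClient::new(Region::UsEast1);
/// let request = ListAttacksRequest {
///     // Attacks that started on or after 2018-01-01T00:00:00Z (Unix seconds).
///     start_time: Some(TimeRange {
///         from_inclusive: Some(1_514_764_800.0),
///         to_exclusive: None,
///     }),
///     end_time: None,
///     max_results: Some(20),
///     next_token: None,
///     resource_arns: None,
/// };
/// match client.list_attacks(request).sync() {
///     Ok(response) => {
///         for attack in response.attack_summaries.unwrap_or_default() {
///             println!("{:?} against {:?}", attack.attack_id, attack.resource_arn);
///         }
///     }
///     Err(err) => eprintln!("list_attacks failed: {}", err),
/// }
/// ```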
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct ListAttacksRequest {
    /// <p>The end of the time period for the attacks. This is a <code>timestamp</code> type. The default is Unix time in seconds; however, any valid <a href="http://docs.aws.amazon.com/cli/latest/userguide/cli-using-param.html#parameter-types">timestamp format</a> is allowed.</p>
#[serde(rename = "EndTime")]
#[serde(skip_serializing_if = "Option::is_none")]
pub end_time: Option<TimeRange>,
/// <p>The maximum number of <a>AttackSummary</a> objects to be returned. If this is left blank, the first 20 results will be returned.</p>
#[serde(rename = "MaxResults")]
#[serde(skip_serializing_if = "Option::is_none")]
pub max_results: Option<i64>,
    /// <p>The <code>NextToken</code> value from a previous call to <code>ListAttacks</code>. Pass null if this is the first call.</p>
#[serde(rename = "NextToken")]
#[serde(skip_serializing_if = "Option::is_none")]
pub next_token: Option<String>,
/// <p>The ARN (Amazon Resource Name) of the resource that was attacked. If this is left blank, all applicable resources for this account will be included.</p>
#[serde(rename = "ResourceArns")]
#[serde(skip_serializing_if = "Option::is_none")]
pub resource_arns: Option<Vec<String>>,
    /// <p>The start of the time period for the attacks. This is a <code>timestamp</code> type. The default is Unix time in seconds; however, any valid <a href="http://docs.aws.amazon.com/cli/latest/userguide/cli-using-param.html#parameter-types">timestamp format</a> is allowed.</p>
#[serde(rename = "StartTime")]
#[serde(skip_serializing_if = "Option::is_none")]
pub start_time: Option<TimeRange>,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct ListAttacksResponse {
/// <p>The attack information for the specified time range.</p>
#[serde(rename = "AttackSummaries")]
#[serde(skip_serializing_if = "Option::is_none")]
pub attack_summaries: Option<Vec<AttackSummary>>,
    /// <p>The token returned by a previous call to indicate that there is more data available. If not null, more results are available. Pass this value for the <code>NextToken</code> parameter in a subsequent call to <code>ListAttacks</code> to retrieve the next set of items.</p>
#[serde(rename = "NextToken")]
#[serde(skip_serializing_if = "Option::is_none")]
pub next_token: Option<String>,
}
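/// <p>Usage sketch (editor's addition, not AWS documentation): page through all
/// protections by feeding each response's <code>NextToken</code> back into the next
/// request, under the same rusoto 0.3x-era assumptions as the other examples in this
/// module.</p>
///
/// ```no_run
/// use rusoto_core::Region;
/// use rusoto_shield::{ListProtectionsRequest, Shield, ShieldClient};
///
/// let client = ShieldClient::new(Region::UsEast1);
/// let mut next_token: Option<String> = None;
/// loop {
///     let request = ListProtectionsRequest {
///         max_results: Some(20),
///         next_token: next_token.clone(),
///     };
///     let response = client
///         .list_protections(request)
///         .sync()
///         .expect("list_protections failed");
///     for protection in response.protections.unwrap_or_default() {
///         println!("{:?} -> {:?}", protection.id, protection.resource_arn);
///     }
///     next_token = response.next_token;
///     if next_token.is_none() {
///         break;
///     }
/// }
/// ```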
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct ListProtectionsRequest {
    /// <p>The maximum number of <a>Protection</a> objects to be returned. If this is left blank, the first 20 results will be returned.</p>
#[serde(rename = "MaxResults")]
#[serde(skip_serializing_if = "Option::is_none")]
pub max_results: Option<i64>,
/// <p>The <code>ListProtectionsRequest.NextToken</code> value from a previous call to <code>ListProtections</code>. Pass null if this is the first call.</p>
#[serde(rename = "NextToken")]
#[serde(skip_serializing_if = "Option::is_none")]
pub next_token: Option<String>,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct ListProtectionsResponse {
    /// <p>If you specify a value for <code>MaxResults</code> and you have more Protections than the value of <code>MaxResults</code>, AWS Shield Advanced returns a <code>NextToken</code> value in the response that allows you to list another group of Protections. For the second and subsequent <code>ListProtections</code> requests, specify the value of <code>NextToken</code> from the previous response to get information about another batch of Protections.</p>
#[serde(rename = "NextToken")]
#[serde(skip_serializing_if = "Option::is_none")]
pub next_token: Option<String>,
/// <p>The array of enabled <a>Protection</a> objects.</p>
#[serde(rename = "Protections")]
#[serde(skip_serializing_if = "Option::is_none")]
pub protections: Option<Vec<Protection>>,
}
/// <p>The mitigation applied to a DDoS attack.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct Mitigation {
/// <p>The name of the mitigation taken for this attack.</p>
#[serde(rename = "MitigationName")]
#[serde(skip_serializing_if = "Option::is_none")]
pub mitigation_name: Option<String>,
}
/// <p>An object that represents a resource that is under DDoS protection.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct Protection {
/// <p>The unique identifier (ID) of the protection.</p>
#[serde(rename = "Id")]
#[serde(skip_serializing_if = "Option::is_none")]
pub id: Option<String>,
/// <p>The friendly name of the protection. For example, <code>My CloudFront distributions</code>.</p>
#[serde(rename = "Name")]
#[serde(skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
/// <p>The ARN (Amazon Resource Name) of the AWS resource that is protected.</p>
#[serde(rename = "ResourceArn")]
#[serde(skip_serializing_if = "Option::is_none")]
pub resource_arn: Option<String>,
}
/// <p>The attack information for the specified SubResource.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct SubResourceSummary {
/// <p>The list of attack types and associated counters.</p>
#[serde(rename = "AttackVectors")]
#[serde(skip_serializing_if = "Option::is_none")]
pub attack_vectors: Option<Vec<SummarizedAttackVector>>,
/// <p>The counters that describe the details of the attack.</p>
#[serde(rename = "Counters")]
#[serde(skip_serializing_if = "Option::is_none")]
pub counters: Option<Vec<SummarizedCounter>>,
/// <p>The unique identifier (ID) of the <code>SubResource</code>.</p>
#[serde(rename = "Id")]
#[serde(skip_serializing_if = "Option::is_none")]
pub id: Option<String>,
/// <p>The <code>SubResource</code> type.</p>
#[serde(rename = "Type")]
#[serde(skip_serializing_if = "Option::is_none")]
pub type_: Option<String>,
}
/// <p>Information about the AWS Shield Advanced subscription for an account.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct Subscription {
    /// <p>If <code>ENABLED</code>, the subscription will be automatically renewed at the end of the existing subscription period.</p> <p>When you initially create a subscription, <code>AutoRenew</code> is set to <code>ENABLED</code>. You can change this by submitting an <code>UpdateSubscription</code> request. If the <code>UpdateSubscription</code> request does not include a value for <code>AutoRenew</code>, the existing value for <code>AutoRenew</code> remains unchanged.</p>
#[serde(rename = "AutoRenew")]
#[serde(skip_serializing_if = "Option::is_none")]
pub auto_renew: Option<String>,
/// <p>The date and time your subscription will end.</p>
#[serde(rename = "EndTime")]
#[serde(skip_serializing_if = "Option::is_none")]
pub end_time: Option<f64>,
/// <p>Specifies how many protections of a given type you can create.</p>
#[serde(rename = "Limits")]
#[serde(skip_serializing_if = "Option::is_none")]
pub limits: Option<Vec<Limit>>,
/// <p>The start time of the subscription, in Unix time in seconds. For more information see <a href="http://docs.aws.amazon.com/cli/latest/userguide/cli-using-param.html#parameter-types">timestamp</a>.</p>
#[serde(rename = "StartTime")]
#[serde(skip_serializing_if = "Option::is_none")]
pub start_time: Option<f64>,
/// <p>The length, in seconds, of the AWS Shield Advanced subscription for the account.</p>
#[serde(rename = "TimeCommitmentInSeconds")]
#[serde(skip_serializing_if = "Option::is_none")]
pub time_commitment_in_seconds: Option<i64>,
}
/// <p>A summary of information about the attack.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct SummarizedAttackVector {
/// <p>The list of counters that describe the details of the attack.</p>
#[serde(rename = "VectorCounters")]
#[serde(skip_serializing_if = "Option::is_none")]
pub vector_counters: Option<Vec<SummarizedCounter>>,
/// <p>The attack type, for example, SNMP reflection or SYN flood.</p>
#[serde(rename = "VectorType")]
pub vector_type: String,
}
/// <p>The counter that describes a DDoS attack.</p>
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct SummarizedCounter {
/// <p>The average value of the counter for a specified time period.</p>
#[serde(rename = "Average")]
#[serde(skip_serializing_if = "Option::is_none")]
pub average: Option<f64>,
/// <p>The maximum value of the counter for a specified time period.</p>
#[serde(rename = "Max")]
#[serde(skip_serializing_if = "Option::is_none")]
pub max: Option<f64>,
/// <p>The number of counters for a specified time period.</p>
#[serde(rename = "N")]
#[serde(skip_serializing_if = "Option::is_none")]
pub n: Option<i64>,
/// <p>The counter name.</p>
#[serde(rename = "Name")]
#[serde(skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
/// <p>The total of counter values for a specified time period.</p>
#[serde(rename = "Sum")]
#[serde(skip_serializing_if = "Option::is_none")]
pub sum: Option<f64>,
/// <p>The unit of the counters.</p>
#[serde(rename = "Unit")]
#[serde(skip_serializing_if = "Option::is_none")]
pub unit: Option<String>,
}
/// <p>The time range.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct TimeRange {
/// <p>The start time, in Unix time in seconds. For more information see <a href="http://docs.aws.amazon.com/cli/latest/userguide/cli-using-param.html#parameter-types">timestamp</a>.</p>
#[serde(rename = "FromInclusive")]
#[serde(skip_serializing_if = "Option::is_none")]
pub from_inclusive: Option<f64>,
/// <p>The end time, in Unix time in seconds. For more information see <a href="http://docs.aws.amazon.com/cli/latest/userguide/cli-using-param.html#parameter-types">timestamp</a>.</p>
#[serde(rename = "ToExclusive")]
#[serde(skip_serializing_if = "Option::is_none")]
pub to_exclusive: Option<f64>,
}
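/// <p>Usage sketch (editor's addition, not AWS documentation): register an email address
/// for the DRT to use during a suspected attack, under the same rusoto 0.3x-era
/// assumptions as the other examples in this module. The address is a placeholder.</p>
///
/// ```no_run
/// use rusoto_core::Region;
/// use rusoto_shield::{
///     EmergencyContact, Shield, ShieldClient, UpdateEmergencyContactSettingsRequest,
/// };
///
/// let client = ShieldClient::new(Region::UsEast1);
/// let request = UpdateEmergencyContactSettingsRequest {
///     emergency_contact_list: Some(vec![EmergencyContact {
///         email_address: "ops@example.com".to_string(),
///     }]),
/// };
/// if let Err(err) = client.update_emergency_contact_settings(request).sync() {
///     eprintln!("update_emergency_contact_settings failed: {}", err);
/// }
/// ```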
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct UpdateEmergencyContactSettingsRequest {
/// <p>A list of email addresses that the DRT can use to contact you during a suspected attack.</p>
#[serde(rename = "EmergencyContactList")]
#[serde(skip_serializing_if = "Option::is_none")]
pub emergency_contact_list: Option<Vec<EmergencyContact>>,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct UpdateEmergencyContactSettingsResponse {}
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct UpdateSubscriptionRequest {
    /// <p>When you initially create a subscription, <code>AutoRenew</code> is set to <code>ENABLED</code>. If <code>ENABLED</code>, the subscription will be automatically renewed at the end of the existing subscription period. You can change this by submitting an <code>UpdateSubscription</code> request. If the <code>UpdateSubscription</code> request does not include a value for <code>AutoRenew</code>, the existing value for <code>AutoRenew</code> remains unchanged.</p>
#[serde(rename = "AutoRenew")]
#[serde(skip_serializing_if = "Option::is_none")]
pub auto_renew: Option<String>,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[cfg_attr(test, derive(Serialize))]
pub struct UpdateSubscriptionResponse {}
/// Errors returned by AssociateDRTLogBucket
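///
/// <p>Usage sketch (editor's addition, not AWS documentation): match on the variants to
/// separate retryable from permanent failures, under the same rusoto 0.3x-era assumptions
/// as the other examples in this module; the client and request values are hypothetical.</p>
///
/// ```no_run
/// # use rusoto_core::Region;
/// # use rusoto_shield::{
/// #     AssociateDRTLogBucketError, AssociateDRTLogBucketRequest, Shield, ShieldClient,
/// # };
/// # let client = ShieldClient::new(Region::UsEast1);
/// # let request = AssociateDRTLogBucketRequest {
/// #     log_bucket: "my-flow-logs".to_string(),
/// # };
/// match client.associate_drt_log_bucket(request).sync() {
///     Ok(_) => println!("log bucket associated"),
///     // The variant docs below mark these two conditions as safe to retry.
///     Err(AssociateDRTLogBucketError::InternalError(msg))
///     | Err(AssociateDRTLogBucketError::OptimisticLock(msg)) => {
///         eprintln!("retryable failure: {}", msg)
///     }
///     Err(other) => eprintln!("permanent failure: {}", other),
/// }
/// ```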
#[derive(Debug, PartialEq)]
pub enum AssociateDRTLogBucketError {
/// <p>In order to grant the necessary access to the DDoS Response Team, the user submitting <code>AssociateDRTRole</code> must have the <code>iam:PassRole</code> permission. This error indicates the user did not have the appropriate permissions. For more information, see <a href="https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use_passrole.html">Granting a User Permissions to Pass a Role to an AWS Service</a>. </p>
AccessDeniedForDependency(String),
/// <p>Exception that indicates that a problem occurred with the service infrastructure. You can retry the request.</p>
InternalError(String),
/// <p>Exception that indicates that the operation would not cause any change to occur.</p>
InvalidOperation(String),
/// <p>Exception that indicates that the parameters passed to the API are invalid. </p>
InvalidParameter(String),
/// <p>Exception that indicates that the operation would exceed a limit.</p> <p> <code>Type</code> is the type of limit that would be exceeded.</p> <p> <code>Limit</code> is the threshold that would be exceeded.</p>
LimitsExceeded(String),
    /// <p>The ARN of the role that you specified does not exist.</p>
NoAssociatedRole(String),
/// <p>Exception that indicates that the protection state has been modified by another client. You can retry the request.</p>
OptimisticLock(String),
/// <p>Exception indicating the specified resource does not exist.</p>
ResourceNotFound(String),
/// An error occurred dispatching the HTTP request
HttpDispatch(HttpDispatchError),
/// An error was encountered with AWS credentials.
Credentials(CredentialsError),
/// A validation error occurred. Details from AWS are provided.
Validation(String),
/// An error occurred parsing the response payload.
ParseError(String),
/// An unknown error occurred. The raw HTTP response is provided.
Unknown(BufferedHttpResponse),
}
impl AssociateDRTLogBucketError {
pub fn from_response(res: BufferedHttpResponse) -> AssociateDRTLogBucketError {
if let Ok(json) = from_slice::<SerdeJsonValue>(&res.body) {
let raw_error_type = json
.get("__type")
.and_then(|e| e.as_str())
.unwrap_or("Unknown");
let error_message = json.get("message").and_then(|m| m.as_str()).unwrap_or("");
let pieces: Vec<&str> = raw_error_type.split("#").collect();
let error_type = pieces.last().expect("Expected error type");
match *error_type {
"AccessDeniedForDependencyException" => {
return AssociateDRTLogBucketError::AccessDeniedForDependency(String::from(
error_message,
))
}
"InternalErrorException" => {
return AssociateDRTLogBucketError::InternalError(String::from(error_message))
}
"InvalidOperationException" => {
return AssociateDRTLogBucketError::InvalidOperation(String::from(error_message))
}
"InvalidParameterException" => {
return AssociateDRTLogBucketError::InvalidParameter(String::from(error_message))
}
"LimitsExceededException" => {
return AssociateDRTLogBucketError::LimitsExceeded(String::from(error_message))
}
"NoAssociatedRoleException" => {
return AssociateDRTLogBucketError::NoAssociatedRole(String::from(error_message))
}
"OptimisticLockException" => {
return AssociateDRTLogBucketError::OptimisticLock(String::from(error_message))
}
"ResourceNotFoundException" => {
return AssociateDRTLogBucketError::ResourceNotFound(String::from(error_message))
}
"ValidationException" => {
return AssociateDRTLogBucketError::Validation(error_message.to_string())
}
_ => {}
}
}
return AssociateDRTLogBucketError::Unknown(res);
}
}
impl From<serde_json::error::Error> for AssociateDRTLogBucketError {
fn from(err: serde_json::error::Error) -> AssociateDRTLogBucketError {
AssociateDRTLogBucketError::ParseError(err.description().to_string())
}
}
impl From<CredentialsError> for AssociateDRTLogBucketError {
fn from(err: CredentialsError) -> AssociateDRTLogBucketError {
AssociateDRTLogBucketError::Credentials(err)
}
}
impl From<HttpDispatchError> for AssociateDRTLogBucketError {
fn from(err: HttpDispatchError) -> AssociateDRTLogBucketError {
AssociateDRTLogBucketError::HttpDispatch(err)
}
}
impl From<io::Error> for AssociateDRTLogBucketError {
fn from(err: io::Error) -> AssociateDRTLogBucketError {
AssociateDRTLogBucketError::HttpDispatch(HttpDispatchError::from(err))
}
}
impl fmt::Display for AssociateDRTLogBucketError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for AssociateDRTLogBucketError {
fn description(&self) -> &str {
match *self {
AssociateDRTLogBucketError::AccessDeniedForDependency(ref cause) => cause,
AssociateDRTLogBucketError::InternalError(ref cause) => cause,
AssociateDRTLogBucketError::InvalidOperation(ref cause) => cause,
AssociateDRTLogBucketError::InvalidParameter(ref cause) => cause,
AssociateDRTLogBucketError::LimitsExceeded(ref cause) => cause,
AssociateDRTLogBucketError::NoAssociatedRole(ref cause) => cause,
AssociateDRTLogBucketError::OptimisticLock(ref cause) => cause,
AssociateDRTLogBucketError::ResourceNotFound(ref cause) => cause,
AssociateDRTLogBucketError::Validation(ref cause) => cause,
AssociateDRTLogBucketError::Credentials(ref err) => err.description(),
AssociateDRTLogBucketError::HttpDispatch(ref dispatch_error) => {
dispatch_error.description()
}
AssociateDRTLogBucketError::ParseError(ref cause) => cause,
AssociateDRTLogBucketError::Unknown(_) => "unknown error",
}
}
}
/// Errors returned by AssociateDRTRole
#[derive(Debug, PartialEq)]
pub enum AssociateDRTRoleError {
/// <p>In order to grant the necessary access to the DDoS Response Team, the user submitting <code>AssociateDRTRole</code> must have the <code>iam:PassRole</code> permission. This error indicates the user did not have the appropriate permissions. For more information, see <a href="https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use_passrole.html">Granting a User Permissions to Pass a Role to an AWS Service</a>. </p>
AccessDeniedForDependency(String),
/// <p>Exception that indicates that a problem occurred with the service infrastructure. You can retry the request.</p>
InternalError(String),
/// <p>Exception that indicates that the operation would not cause any change to occur.</p>
InvalidOperation(String),
/// <p>Exception that indicates that the parameters passed to the API are invalid. </p>
InvalidParameter(String),
/// <p>Exception that indicates that the protection state has been modified by another client. You can retry the request.</p>
OptimisticLock(String),
/// <p>Exception indicating the specified resource does not exist.</p>
ResourceNotFound(String),
/// An error occurred dispatching the HTTP request
HttpDispatch(HttpDispatchError),
/// An error was encountered with AWS credentials.
Credentials(CredentialsError),
/// A validation error occurred. Details from AWS are provided.
Validation(String),
/// An error occurred parsing the response payload.
ParseError(String),
/// An unknown error occurred. The raw HTTP response is provided.
Unknown(BufferedHttpResponse),
}
impl AssociateDRTRoleError {
pub fn from_response(res: BufferedHttpResponse) -> AssociateDRTRoleError {
if let Ok(json) = from_slice::<SerdeJsonValue>(&res.body) {
let raw_error_type = json
.get("__type")
.and_then(|e| e.as_str())
.unwrap_or("Unknown");
let error_message = json.get("message").and_then(|m| m.as_str()).unwrap_or("");
let pieces: Vec<&str> = raw_error_type.split("#").collect();
let error_type = pieces.last().expect("Expected error type");
match *error_type {
"AccessDeniedForDependencyException" => {
return AssociateDRTRoleError::AccessDeniedForDependency(String::from(
error_message,
))
}
"InternalErrorException" => {
return AssociateDRTRoleError::InternalError(String::from(error_message))
}
"InvalidOperationException" => {
return AssociateDRTRoleError::InvalidOperation(String::from(error_message))
}
"InvalidParameterException" => {
return AssociateDRTRoleError::InvalidParameter(String::from(error_message))
}
"OptimisticLockException" => {
return AssociateDRTRoleError::OptimisticLock(String::from(error_message))
}
"ResourceNotFoundException" => {
return AssociateDRTRoleError::ResourceNotFound(String::from(error_message))
}
"ValidationException" => {
return AssociateDRTRoleError::Validation(error_message.to_string())
}
_ => {}
}
}
return AssociateDRTRoleError::Unknown(res);
}
}
impl From<serde_json::error::Error> for AssociateDRTRoleError {
fn from(err: serde_json::error::Error) -> AssociateDRTRoleError {
AssociateDRTRoleError::ParseError(err.description().to_string())
}
}
impl From<CredentialsError> for AssociateDRTRoleError {
fn from(err: CredentialsError) -> AssociateDRTRoleError {
AssociateDRTRoleError::Credentials(err)
}
}
impl From<HttpDispatchError> for AssociateDRTRoleError {
fn from(err: HttpDispatchError) -> AssociateDRTRoleError {
AssociateDRTRoleError::HttpDispatch(err)
}
}
impl From<io::Error> for AssociateDRTRoleError {
fn from(err: io::Error) -> AssociateDRTRoleError {
AssociateDRTRoleError::HttpDispatch(HttpDispatchError::from(err))
}
}
impl fmt::Display for AssociateDRTRoleError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for AssociateDRTRoleError {
fn description(&self) -> &str {
match *self {
AssociateDRTRoleError::AccessDeniedForDependency(ref cause) => cause,
AssociateDRTRoleError::InternalError(ref cause) => cause,
AssociateDRTRoleError::InvalidOperation(ref cause) => cause,
AssociateDRTRoleError::InvalidParameter(ref cause) => cause,
AssociateDRTRoleError::OptimisticLock(ref cause) => cause,
AssociateDRTRoleError::ResourceNotFound(ref cause) => cause,
AssociateDRTRoleError::Validation(ref cause) => cause,
AssociateDRTRoleError::Credentials(ref err) => err.description(),
AssociateDRTRoleError::HttpDispatch(ref dispatch_error) => dispatch_error.description(),
AssociateDRTRoleError::ParseError(ref cause) => cause,
AssociateDRTRoleError::Unknown(_) => "unknown error",
}
}
}
/// Errors returned by CreateProtection
#[derive(Debug, PartialEq)]
pub enum CreateProtectionError {
/// <p>Exception that indicates that a problem occurred with the service infrastructure. You can retry the request.</p>
InternalError(String),
/// <p>Exception that indicates that the operation would not cause any change to occur.</p>
InvalidOperation(String),
/// <p>Exception that indicates that the resource is invalid. You might not have access to the resource, or the resource might not exist.</p>
InvalidResource(String),
/// <p>Exception that indicates that the operation would exceed a limit.</p> <p> <code>Type</code> is the type of limit that would be exceeded.</p> <p> <code>Limit</code> is the threshold that would be exceeded.</p>
LimitsExceeded(String),
/// <p>Exception that indicates that the protection state has been modified by another client. You can retry the request.</p>
OptimisticLock(String),
/// <p>Exception indicating the specified resource already exists.</p>
ResourceAlreadyExists(String),
/// <p>Exception indicating the specified resource does not exist.</p>
ResourceNotFound(String),
/// An error occurred dispatching the HTTP request
HttpDispatch(HttpDispatchError),
/// An error was encountered with AWS credentials.
Credentials(CredentialsError),
/// A validation error occurred. Details from AWS are provided.
Validation(String),
/// An error occurred parsing the response payload.
ParseError(String),
/// An unknown error occurred. The raw HTTP response is provided.
Unknown(BufferedHttpResponse),
}
impl CreateProtectionError {
pub fn from_response(res: BufferedHttpResponse) -> CreateProtectionError {
if let Ok(json) = from_slice::<SerdeJsonValue>(&res.body) {
let raw_error_type = json
.get("__type")
.and_then(|e| e.as_str())
.unwrap_or("Unknown");
let error_message = json.get("message").and_then(|m| m.as_str()).unwrap_or("");
let pieces: Vec<&str> = raw_error_type.split("#").collect();
let error_type = pieces.last().expect("Expected error type");
match *error_type {
"InternalErrorException" => {
return CreateProtectionError::InternalError(String::from(error_message))
}
"InvalidOperationException" => {
return CreateProtectionError::InvalidOperation(String::from(error_message))
}
"InvalidResourceException" => {
return CreateProtectionError::InvalidResource(String::from(error_message))
}
"LimitsExceededException" => {
return CreateProtectionError::LimitsExceeded(String::from(error_message))
}
"OptimisticLockException" => {
return CreateProtectionError::OptimisticLock(String::from(error_message))
}
"ResourceAlreadyExistsException" => {
return CreateProtectionError::ResourceAlreadyExists(String::from(error_message))
}
"ResourceNotFoundException" => {
return CreateProtectionError::ResourceNotFound(String::from(error_message))
}
"ValidationException" => {
return CreateProtectionError::Validation(error_message.to_string())
}
_ => {}
}
}
return CreateProtectionError::Unknown(res);
}
}
impl From<serde_json::error::Error> for CreateProtectionError {
fn from(err: serde_json::error::Error) -> CreateProtectionError {
CreateProtectionError::ParseError(err.description().to_string())
}
}
impl From<CredentialsError> for CreateProtectionError {
fn from(err: CredentialsError) -> CreateProtectionError {
CreateProtectionError::Credentials(err)
}
}
impl From<HttpDispatchError> for CreateProtectionError {
fn from(err: HttpDispatchError) -> CreateProtectionError {
CreateProtectionError::HttpDispatch(err)
}
}
impl From<io::Error> for CreateProtectionError {
fn from(err: io::Error) -> CreateProtectionError {
CreateProtectionError::HttpDispatch(HttpDispatchError::from(err))
}
}
impl fmt::Display for CreateProtectionError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for CreateProtectionError {
fn description(&self) -> &str {
match *self {
CreateProtectionError::InternalError(ref cause) => cause,
CreateProtectionError::InvalidOperation(ref cause) => cause,
CreateProtectionError::InvalidResource(ref cause) => cause,
CreateProtectionError::LimitsExceeded(ref cause) => cause,
CreateProtectionError::OptimisticLock(ref cause) => cause,
CreateProtectionError::ResourceAlreadyExists(ref cause) => cause,
CreateProtectionError::ResourceNotFound(ref cause) => cause,
CreateProtectionError::Validation(ref cause) => cause,
CreateProtectionError::Credentials(ref err) => err.description(),
CreateProtectionError::HttpDispatch(ref dispatch_error) => dispatch_error.description(),
CreateProtectionError::ParseError(ref cause) => cause,
CreateProtectionError::Unknown(_) => "unknown error",
}
}
}
/// Errors returned by CreateSubscription
#[derive(Debug, PartialEq)]
pub enum CreateSubscriptionError {
/// <p>Exception that indicates that a problem occurred with the service infrastructure. You can retry the request.</p>
InternalError(String),
/// <p>Exception indicating the specified resource already exists.</p>
ResourceAlreadyExists(String),
/// An error occurred dispatching the HTTP request
HttpDispatch(HttpDispatchError),
/// An error was encountered with AWS credentials.
Credentials(CredentialsError),
/// A validation error occurred. Details from AWS are provided.
Validation(String),
/// An error occurred parsing the response payload.
ParseError(String),
/// An unknown error occurred. The raw HTTP response is provided.
Unknown(BufferedHttpResponse),
}
impl CreateSubscriptionError {
pub fn from_response(res: BufferedHttpResponse) -> CreateSubscriptionError {
if let Ok(json) = from_slice::<SerdeJsonValue>(&res.body) {
let raw_error_type = json
.get("__type")
.and_then(|e| e.as_str())
.unwrap_or("Unknown");
let error_message = json.get("message").and_then(|m| m.as_str()).unwrap_or("");
let pieces: Vec<&str> = raw_error_type.split("#").collect();
let error_type = pieces.last().expect("Expected error type");
match *error_type {
"InternalErrorException" => {
return CreateSubscriptionError::InternalError(String::from(error_message))
}
"ResourceAlreadyExistsException" => {
return CreateSubscriptionError::ResourceAlreadyExists(String::from(
error_message,
))
}
"ValidationException" => {
return CreateSubscriptionError::Validation(error_message.to_string())
}
_ => {}
}
}
return CreateSubscriptionError::Unknown(res);
}
}
impl From<serde_json::error::Error> for CreateSubscriptionError {
fn from(err: serde_json::error::Error) -> CreateSubscriptionError {
CreateSubscriptionError::ParseError(err.description().to_string())
}
}
impl From<CredentialsError> for CreateSubscriptionError {
fn from(err: CredentialsError) -> CreateSubscriptionError {
CreateSubscriptionError::Credentials(err)
}
}
impl From<HttpDispatchError> for CreateSubscriptionError {
fn from(err: HttpDispatchError) -> CreateSubscriptionError {
CreateSubscriptionError::HttpDispatch(err)
}
}
impl From<io::Error> for CreateSubscriptionError {
fn from(err: io::Error) -> CreateSubscriptionError {
CreateSubscriptionError::HttpDispatch(HttpDispatchError::from(err))
}
}
impl fmt::Display for CreateSubscriptionError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for CreateSubscriptionError {
fn description(&self) -> &str {
match *self {
CreateSubscriptionError::InternalError(ref cause) => cause,
CreateSubscriptionError::ResourceAlreadyExists(ref cause) => cause,
CreateSubscriptionError::Validation(ref cause) => cause,
CreateSubscriptionError::Credentials(ref err) => err.description(),
CreateSubscriptionError::HttpDispatch(ref dispatch_error) => {
dispatch_error.description()
}
CreateSubscriptionError::ParseError(ref cause) => cause,
CreateSubscriptionError::Unknown(_) => "unknown error",
}
}
}
/// Errors returned by DeleteProtection
#[derive(Debug, PartialEq)]
pub enum DeleteProtectionError {
/// <p>Exception that indicates that a problem occurred with the service infrastructure. You can retry the request.</p>
InternalError(String),
/// <p>Exception that indicates that the protection state has been modified by another client. You can retry the request.</p>
OptimisticLock(String),
/// <p>Exception indicating the specified resource does not exist.</p>
ResourceNotFound(String),
/// An error occurred dispatching the HTTP request
HttpDispatch(HttpDispatchError),
/// An error was encountered with AWS credentials.
Credentials(CredentialsError),
/// A validation error occurred. Details from AWS are provided.
Validation(String),
/// An error occurred parsing the response payload.
ParseError(String),
/// An unknown error occurred. The raw HTTP response is provided.
Unknown(BufferedHttpResponse),
}
impl DeleteProtectionError {
pub fn from_response(res: BufferedHttpResponse) -> DeleteProtectionError {
if let Ok(json) = from_slice::<SerdeJsonValue>(&res.body) {
let raw_error_type = json
.get("__type")
.and_then(|e| e.as_str())
.unwrap_or("Unknown");
let error_message = json.get("message").and_then(|m| m.as_str()).unwrap_or("");
let pieces: Vec<&str> = raw_error_type.split("#").collect();
let error_type = pieces.last().expect("Expected error type");
match *error_type {
"InternalErrorException" => {
return DeleteProtectionError::InternalError(String::from(error_message))
}
"OptimisticLockException" => {
return DeleteProtectionError::OptimisticLock(String::from(error_message))
}
"ResourceNotFoundException" => {
return DeleteProtectionError::ResourceNotFound(String::from(error_message))
}
"ValidationException" => {
return DeleteProtectionError::Validation(error_message.to_string())
}
_ => {}
}
}
return DeleteProtectionError::Unknown(res);
}
}
impl From<serde_json::error::Error> for DeleteProtectionError {
fn from(err: serde_json::error::Error) -> DeleteProtectionError {
DeleteProtectionError::ParseError(err.description().to_string())
}
}
impl From<CredentialsError> for DeleteProtectionError {
fn from(err: CredentialsError) -> DeleteProtectionError {
DeleteProtectionError::Credentials(err)
}
}
impl From<HttpDispatchError> for DeleteProtectionError {
fn from(err: HttpDispatchError) -> DeleteProtectionError {
DeleteProtectionError::HttpDispatch(err)
}
}
impl From<io::Error> for DeleteProtectionError {
fn from(err: io::Error) -> DeleteProtectionError {
DeleteProtectionError::HttpDispatch(HttpDispatchError::from(err))
}
}
impl fmt::Display for DeleteProtectionError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for DeleteProtectionError {
fn description(&self) -> &str {
match *self {
DeleteProtectionError::InternalError(ref cause) => cause,
DeleteProtectionError::OptimisticLock(ref cause) => cause,
DeleteProtectionError::ResourceNotFound(ref cause) => cause,
DeleteProtectionError::Validation(ref cause) => cause,
DeleteProtectionError::Credentials(ref err) => err.description(),
DeleteProtectionError::HttpDispatch(ref dispatch_error) => dispatch_error.description(),
DeleteProtectionError::ParseError(ref cause) => cause,
DeleteProtectionError::Unknown(_) => "unknown error",
}
}
}
/// Errors returned by DeleteSubscription
#[derive(Debug, PartialEq)]
pub enum DeleteSubscriptionError {
/// <p>Exception that indicates that a problem occurred with the service infrastructure. You can retry the request.</p>
InternalError(String),
/// <p>You are trying to update a subscription that has not yet completed the 1-year commitment. You can change the <code>AutoRenew</code> parameter during the last 30 days of your subscription. This exception indicates that you are attempting to change <code>AutoRenew</code> prior to that period.</p>
LockedSubscription(String),
/// <p>Exception indicating the specified resource does not exist.</p>
ResourceNotFound(String),
/// An error occurred dispatching the HTTP request
HttpDispatch(HttpDispatchError),
/// An error was encountered with AWS credentials.
Credentials(CredentialsError),
/// A validation error occurred. Details from AWS are provided.
Validation(String),
/// An error occurred parsing the response payload.
ParseError(String),
/// An unknown error occurred. The raw HTTP response is provided.
Unknown(BufferedHttpResponse),
}
impl DeleteSubscriptionError {
pub fn from_response(res: BufferedHttpResponse) -> DeleteSubscriptionError {
if let Ok(json) = from_slice::<SerdeJsonValue>(&res.body) {
let raw_error_type = json
.get("__type")
.and_then(|e| e.as_str())
.unwrap_or("Unknown");
let error_message = json.get("message").and_then(|m| m.as_str()).unwrap_or("");
let pieces: Vec<&str> = raw_error_type.split("#").collect();
let error_type = pieces.last().expect("Expected error type");
match *error_type {
"InternalErrorException" => {
return DeleteSubscriptionError::InternalError(String::from(error_message))
}
"LockedSubscriptionException" => {
return DeleteSubscriptionError::LockedSubscription(String::from(error_message))
}
"ResourceNotFoundException" => {
return DeleteSubscriptionError::ResourceNotFound(String::from(error_message))
}
"ValidationException" => {
return DeleteSubscriptionError::Validation(error_message.to_string())
}
_ => {}
}
}
return DeleteSubscriptionError::Unknown(res);
}
}
impl From<serde_json::error::Error> for DeleteSubscriptionError {
fn from(err: serde_json::error::Error) -> DeleteSubscriptionError {
DeleteSubscriptionError::ParseError(err.description().to_string())
}
}
impl From<CredentialsError> for DeleteSubscriptionError {
fn from(err: CredentialsError) -> DeleteSubscriptionError {
DeleteSubscriptionError::Credentials(err)
}
}
impl From<HttpDispatchError> for DeleteSubscriptionError {
fn from(err: HttpDispatchError) -> DeleteSubscriptionError {
DeleteSubscriptionError::HttpDispatch(err)
}
}
impl From<io::Error> for DeleteSubscriptionError {
fn from(err: io::Error) -> DeleteSubscriptionError {
DeleteSubscriptionError::HttpDispatch(HttpDispatchError::from(err))
}
}
impl fmt::Display for DeleteSubscriptionError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for DeleteSubscriptionError {
fn description(&self) -> &str {
match *self {
DeleteSubscriptionError::InternalError(ref cause) => cause,
DeleteSubscriptionError::LockedSubscription(ref cause) => cause,
DeleteSubscriptionError::ResourceNotFound(ref cause) => cause,
DeleteSubscriptionError::Validation(ref cause) => cause,
DeleteSubscriptionError::Credentials(ref err) => err.description(),
DeleteSubscriptionError::HttpDispatch(ref dispatch_error) => {
dispatch_error.description()
}
DeleteSubscriptionError::ParseError(ref cause) => cause,
DeleteSubscriptionError::Unknown(_) => "unknown error",
}
}
}
/// Errors returned by DescribeAttack
#[derive(Debug, PartialEq)]
pub enum DescribeAttackError {
/// <p>Exception that indicates that a problem occurred with the service infrastructure. You can retry the request.</p>
InternalError(String),
/// <p>Exception that indicates that the parameters passed to the API are invalid. </p>
InvalidParameter(String),
/// An error occurred dispatching the HTTP request
HttpDispatch(HttpDispatchError),
/// An error was encountered with AWS credentials.
Credentials(CredentialsError),
/// A validation error occurred. Details from AWS are provided.
Validation(String),
/// An error occurred parsing the response payload.
ParseError(String),
/// An unknown error occurred. The raw HTTP response is provided.
Unknown(BufferedHttpResponse),
}
impl DescribeAttackError {
pub fn from_response(res: BufferedHttpResponse) -> DescribeAttackError {
if let Ok(json) = from_slice::<SerdeJsonValue>(&res.body) {
let raw_error_type = json
.get("__type")
.and_then(|e| e.as_str())
.unwrap_or("Unknown");
let error_message = json.get("message").and_then(|m| m.as_str()).unwrap_or("");
let pieces: Vec<&str> = raw_error_type.split("#").collect();
let error_type = pieces.last().expect("Expected error type");
match *error_type {
"InternalErrorException" => {
return DescribeAttackError::InternalError(String::from(error_message))
}
"InvalidParameterException" => {
return DescribeAttackError::InvalidParameter(String::from(error_message))
}
"ValidationException" => {
return DescribeAttackError::Validation(error_message.to_string())
}
_ => {}
}
}
return DescribeAttackError::Unknown(res);
}
}
impl From<serde_json::error::Error> for DescribeAttackError {
fn from(err: serde_json::error::Error) -> DescribeAttackError {
DescribeAttackError::ParseError(err.description().to_string())
}
}
impl From<CredentialsError> for DescribeAttackError {
fn from(err: CredentialsError) -> DescribeAttackError {
DescribeAttackError::Credentials(err)
}
}
impl From<HttpDispatchError> for DescribeAttackError {
fn from(err: HttpDispatchError) -> DescribeAttackError {
DescribeAttackError::HttpDispatch(err)
}
}
impl From<io::Error> for DescribeAttackError {
fn from(err: io::Error) -> DescribeAttackError {
DescribeAttackError::HttpDispatch(HttpDispatchError::from(err))
}
}
impl fmt::Display for DescribeAttackError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for DescribeAttackError {
fn description(&self) -> &str {
match *self {
DescribeAttackError::InternalError(ref cause) => cause,
DescribeAttackError::InvalidParameter(ref cause) => cause,
DescribeAttackError::Validation(ref cause) => cause,
DescribeAttackError::Credentials(ref err) => err.description(),
DescribeAttackError::HttpDispatch(ref dispatch_error) => dispatch_error.description(),
DescribeAttackError::ParseError(ref cause) => cause,
DescribeAttackError::Unknown(_) => "unknown error",
}
}
}
/// Errors returned by DescribeDRTAccess
#[derive(Debug, PartialEq)]
pub enum DescribeDRTAccessError {
/// <p>Exception that indicates that a problem occurred with the service infrastructure. You can retry the request.</p>
InternalError(String),
/// <p>Exception indicating the specified resource does not exist.</p>
ResourceNotFound(String),
/// An error occurred dispatching the HTTP request
HttpDispatch(HttpDispatchError),
/// An error was encountered with AWS credentials.
Credentials(CredentialsError),
/// A validation error occurred. Details from AWS are provided.
Validation(String),
/// An error occurred parsing the response payload.
ParseError(String),
/// An unknown error occurred. The raw HTTP response is provided.
Unknown(BufferedHttpResponse),
}
impl DescribeDRTAccessError {
pub fn from_response(res: BufferedHttpResponse) -> DescribeDRTAccessError {
if let Ok(json) = from_slice::<SerdeJsonValue>(&res.body) {
let raw_error_type = json
.get("__type")
.and_then(|e| e.as_str())
.unwrap_or("Unknown");
let error_message = json.get("message").and_then(|m| m.as_str()).unwrap_or("");
let pieces: Vec<&str> = raw_error_type.split("#").collect();
let error_type = pieces.last().expect("Expected error type");
match *error_type {
"InternalErrorException" => {
return DescribeDRTAccessError::InternalError(String::from(error_message))
}
"ResourceNotFoundException" => {
return DescribeDRTAccessError::ResourceNotFound(String::from(error_message))
}
"ValidationException" => {
return DescribeDRTAccessError::Validation(error_message.to_string())
}
_ => {}
}
}
return DescribeDRTAccessError::Unknown(res);
}
}
impl From<serde_json::error::Error> for DescribeDRTAccessError {
fn from(err: serde_json::error::Error) -> DescribeDRTAccessError {
DescribeDRTAccessError::ParseError(err.description().to_string())
}
}
impl From<CredentialsError> for DescribeDRTAccessError {
fn from(err: CredentialsError) -> DescribeDRTAccessError {
DescribeDRTAccessError::Credentials(err)
}
}
impl From<HttpDispatchError> for DescribeDRTAccessError {
fn from(err: HttpDispatchError) -> DescribeDRTAccessError {
DescribeDRTAccessError::HttpDispatch(err)
}
}
impl From<io::Error> for DescribeDRTAccessError {
fn from(err: io::Error) -> DescribeDRTAccessError {
DescribeDRTAccessError::HttpDispatch(HttpDispatchError::from(err))
}
}
impl fmt::Display for DescribeDRTAccessError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for DescribeDRTAccessError {
fn description(&self) -> &str {
match *self {
DescribeDRTAccessError::InternalError(ref cause) => cause,
DescribeDRTAccessError::ResourceNotFound(ref cause) => cause,
DescribeDRTAccessError::Validation(ref cause) => cause,
DescribeDRTAccessError::Credentials(ref err) => err.description(),
DescribeDRTAccessError::HttpDispatch(ref dispatch_error) => {
dispatch_error.description()
}
DescribeDRTAccessError::ParseError(ref cause) => cause,
DescribeDRTAccessError::Unknown(_) => "unknown error",
}
}
}
/// Errors returned by DescribeEmergencyContactSettings
#[derive(Debug, PartialEq)]
pub enum DescribeEmergencyContactSettingsError {
/// <p>Exception that indicates that a problem occurred with the service infrastructure. You can retry the request.</p>
InternalError(String),
/// <p>Exception indicating the specified resource does not exist.</p>
ResourceNotFound(String),
/// An error occurred dispatching the HTTP request
HttpDispatch(HttpDispatchError),
/// An error was encountered with AWS credentials.
Credentials(CredentialsError),
/// A validation error occurred. Details from AWS are provided.
Validation(String),
/// An error occurred parsing the response payload.
ParseError(String),
/// An unknown error occurred. The raw HTTP response is provided.
Unknown(BufferedHttpResponse),
}
impl DescribeEmergencyContactSettingsError {
pub fn from_response(res: BufferedHttpResponse) -> DescribeEmergencyContactSettingsError {
if let Ok(json) = from_slice::<SerdeJsonValue>(&res.body) {
let raw_error_type = json
.get("__type")
.and_then(|e| e.as_str())
.unwrap_or("Unknown");
let error_message = json.get("message").and_then(|m| m.as_str()).unwrap_or("");
let pieces: Vec<&str> = raw_error_type.split("#").collect();
let error_type = pieces.last().expect("Expected error type");
match *error_type {
"InternalErrorException" => {
return DescribeEmergencyContactSettingsError::InternalError(String::from(
error_message,
))
}
"ResourceNotFoundException" => {
return DescribeEmergencyContactSettingsError::ResourceNotFound(String::from(
error_message,
))
}
"ValidationException" => {
return DescribeEmergencyContactSettingsError::Validation(
error_message.to_string(),
)
}
_ => {}
}
}
return DescribeEmergencyContactSettingsError::Unknown(res);
}
}
impl From<serde_json::error::Error> for DescribeEmergencyContactSettingsError {
fn from(err: serde_json::error::Error) -> DescribeEmergencyContactSettingsError {
DescribeEmergencyContactSettingsError::ParseError(err.description().to_string())
}
}
impl From<CredentialsError> for DescribeEmergencyContactSettingsError {
fn from(err: CredentialsError) -> DescribeEmergencyContactSettingsError {
DescribeEmergencyContactSettingsError::Credentials(err)
}
}
impl From<HttpDispatchError> for DescribeEmergencyContactSettingsError {
fn from(err: HttpDispatchError) -> DescribeEmergencyContactSettingsError {
DescribeEmergencyContactSettingsError::HttpDispatch(err)
}
}
impl From<io::Error> for DescribeEmergencyContactSettingsError {
fn from(err: io::Error) -> DescribeEmergencyContactSettingsError {
DescribeEmergencyContactSettingsError::HttpDispatch(HttpDispatchError::from(err))
}
}
impl fmt::Display for DescribeEmergencyContactSettingsError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for DescribeEmergencyContactSettingsError {
fn description(&self) -> &str {
match *self {
DescribeEmergencyContactSettingsError::InternalError(ref cause) => cause,
DescribeEmergencyContactSettingsError::ResourceNotFound(ref cause) => cause,
DescribeEmergencyContactSettingsError::Validation(ref cause) => cause,
DescribeEmergencyContactSettingsError::Credentials(ref err) => err.description(),
DescribeEmergencyContactSettingsError::HttpDispatch(ref dispatch_error) => {
dispatch_error.description()
}
DescribeEmergencyContactSettingsError::ParseError(ref cause) => cause,
DescribeEmergencyContactSettingsError::Unknown(_) => "unknown error",
}
}
}
/// Errors returned by DescribeProtection
#[derive(Debug, PartialEq)]
pub enum DescribeProtectionError {
/// <p>Exception that indicates that a problem occurred with the service infrastructure. You can retry the request.</p>
InternalError(String),
/// <p>Exception indicating the specified resource does not exist.</p>
ResourceNotFound(String),
/// An error occurred dispatching the HTTP request
HttpDispatch(HttpDispatchError),
/// An error was encountered with AWS credentials.
Credentials(CredentialsError),
/// A validation error occurred. Details from AWS are provided.
Validation(String),
/// An error occurred parsing the response payload.
ParseError(String),
/// An unknown error occurred. The raw HTTP response is provided.
Unknown(BufferedHttpResponse),
}
impl DescribeProtectionError {
pub fn from_response(res: BufferedHttpResponse) -> DescribeProtectionError {
if let Ok(json) = from_slice::<SerdeJsonValue>(&res.body) {
let raw_error_type = json
.get("__type")
.and_then(|e| e.as_str())
.unwrap_or("Unknown");
let error_message = json.get("message").and_then(|m| m.as_str()).unwrap_or("");
let pieces: Vec<&str> = raw_error_type.split("#").collect();
let error_type = pieces.last().expect("Expected error type");
match *error_type {
"InternalErrorException" => {
return DescribeProtectionError::InternalError(String::from(error_message))
}
"ResourceNotFoundException" => {
return DescribeProtectionError::ResourceNotFound(String::from(error_message))
}
"ValidationException" => {
return DescribeProtectionError::Validation(error_message.to_string())
}
_ => {}
}
}
return DescribeProtectionError::Unknown(res);
}
}
impl From<serde_json::error::Error> for DescribeProtectionError {
fn from(err: serde_json::error::Error) -> DescribeProtectionError {
DescribeProtectionError::ParseError(err.description().to_string())
}
}
impl From<CredentialsError> for DescribeProtectionError {
fn from(err: CredentialsError) -> DescribeProtectionError {
DescribeProtectionError::Credentials(err)
}
}
impl From<HttpDispatchError> for DescribeProtectionError {
fn from(err: HttpDispatchError) -> DescribeProtectionError {
DescribeProtectionError::HttpDispatch(err)
}
}
impl From<io::Error> for DescribeProtectionError {
fn from(err: io::Error) -> DescribeProtectionError {
DescribeProtectionError::HttpDispatch(HttpDispatchError::from(err))
}
}
impl fmt::Display for DescribeProtectionError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for DescribeProtectionError {
fn description(&self) -> &str {
match *self {
DescribeProtectionError::InternalError(ref cause) => cause,
DescribeProtectionError::ResourceNotFound(ref cause) => cause,
DescribeProtectionError::Validation(ref cause) => cause,
DescribeProtectionError::Credentials(ref err) => err.description(),
DescribeProtectionError::HttpDispatch(ref dispatch_error) => {
dispatch_error.description()
}
DescribeProtectionError::ParseError(ref cause) => cause,
DescribeProtectionError::Unknown(_) => "unknown error",
}
}
}
/// Errors returned by DescribeSubscription
#[derive(Debug, PartialEq)]
pub enum DescribeSubscriptionError {
/// <p>Exception that indicates that a problem occurred with the service infrastructure. You can retry the request.</p>
InternalError(String),
/// <p>Exception indicating the specified resource does not exist.</p>
ResourceNotFound(String),
/// An error occurred dispatching the HTTP request
HttpDispatch(HttpDispatchError),
/// An error was encountered with AWS credentials.
Credentials(CredentialsError),
/// A validation error occurred. Details from AWS are provided.
Validation(String),
/// An error occurred parsing the response payload.
ParseError(String),
/// An unknown error occurred. The raw HTTP response is provided.
Unknown(BufferedHttpResponse),
}
impl DescribeSubscriptionError {
pub fn from_response(res: BufferedHttpResponse) -> DescribeSubscriptionError {
if let Ok(json) = from_slice::<SerdeJsonValue>(&res.body) {
let raw_error_type = json
.get("__type")
.and_then(|e| e.as_str())
.unwrap_or("Unknown");
let error_message = json.get("message").and_then(|m| m.as_str()).unwrap_or("");
let pieces: Vec<&str> = raw_error_type.split("#").collect();
let error_type = pieces.last().expect("Expected error type");
match *error_type {
"InternalErrorException" => {
return DescribeSubscriptionError::InternalError(String::from(error_message))
}
"ResourceNotFoundException" => {
return DescribeSubscriptionError::ResourceNotFound(String::from(error_message))
}
"ValidationException" => {
return DescribeSubscriptionError::Validation(error_message.to_string())
}
_ => {}
}
}
return DescribeSubscriptionError::Unknown(res);
}
}
impl From<serde_json::error::Error> for DescribeSubscriptionError {
fn from(err: serde_json::error::Error) -> DescribeSubscriptionError {
DescribeSubscriptionError::ParseError(err.description().to_string())
}
}
impl From<CredentialsError> for DescribeSubscriptionError {
fn from(err: CredentialsError) -> DescribeSubscriptionError {
DescribeSubscriptionError::Credentials(err)
}
}
impl From<HttpDispatchError> for DescribeSubscriptionError {
fn from(err: HttpDispatchError) -> DescribeSubscriptionError {
DescribeSubscriptionError::HttpDispatch(err)
}
}
impl From<io::Error> for DescribeSubscriptionError {
fn from(err: io::Error) -> DescribeSubscriptionError {
DescribeSubscriptionError::HttpDispatch(HttpDispatchError::from(err))
}
}
impl fmt::Display for DescribeSubscriptionError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for DescribeSubscriptionError {
fn description(&self) -> &str {
match *self {
DescribeSubscriptionError::InternalError(ref cause) => cause,
DescribeSubscriptionError::ResourceNotFound(ref cause) => cause,
DescribeSubscriptionError::Validation(ref cause) => cause,
DescribeSubscriptionError::Credentials(ref err) => err.description(),
DescribeSubscriptionError::HttpDispatch(ref dispatch_error) => {
dispatch_error.description()
}
DescribeSubscriptionError::ParseError(ref cause) => cause,
DescribeSubscriptionError::Unknown(_) => "unknown error",
}
}
}
/// Errors returned by DisassociateDRTLogBucket
#[derive(Debug, PartialEq)]
pub enum DisassociateDRTLogBucketError {
/// <p>In order to grant the necessary access to the DDoS Response Team, the user submitting <code>AssociateDRTRole</code> must have the <code>iam:PassRole</code> permission. This error indicates the user did not have the appropriate permissions. For more information, see <a href="https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use_passrole.html">Granting a User Permissions to Pass a Role to an AWS Service</a>. </p>
AccessDeniedForDependency(String),
/// <p>Exception that indicates that a problem occurred with the service infrastructure. You can retry the request.</p>
InternalError(String),
/// <p>Exception that indicates that the operation would not cause any change to occur.</p>
InvalidOperation(String),
    /// <p>The ARN of the role that you specified does not exist.</p>
NoAssociatedRole(String),
/// <p>Exception that indicates that the protection state has been modified by another client. You can retry the request.</p>
OptimisticLock(String),
/// <p>Exception indicating the specified resource does not exist.</p>
ResourceNotFound(String),
/// An error occurred dispatching the HTTP request
HttpDispatch(HttpDispatchError),
/// An error was encountered with AWS credentials.
Credentials(CredentialsError),
/// A validation error occurred. Details from AWS are provided.
Validation(String),
/// An error occurred parsing the response payload.
ParseError(String),
/// An unknown error occurred. The raw HTTP response is provided.
Unknown(BufferedHttpResponse),
}
impl DisassociateDRTLogBucketError {
pub fn from_response(res: BufferedHttpResponse) -> DisassociateDRTLogBucketError {
if let Ok(json) = from_slice::<SerdeJsonValue>(&res.body) {
let raw_error_type = json
.get("__type")
.and_then(|e| e.as_str())
.unwrap_or("Unknown");
let error_message = json.get("message").and_then(|m| m.as_str()).unwrap_or("");
let pieces: Vec<&str> = raw_error_type.split("#").collect();
let error_type = pieces.last().expect("Expected error type");
match *error_type {
"AccessDeniedForDependencyException" => {
return DisassociateDRTLogBucketError::AccessDeniedForDependency(String::from(
error_message,
))
}
"InternalErrorException" => {
return DisassociateDRTLogBucketError::InternalError(String::from(error_message))
}
"InvalidOperationException" => {
return DisassociateDRTLogBucketError::InvalidOperation(String::from(
error_message,
))
}
"NoAssociatedRoleException" => {
return DisassociateDRTLogBucketError::NoAssociatedRole(String::from(
error_message,
))
}
"OptimisticLockException" => {
return DisassociateDRTLogBucketError::OptimisticLock(String::from(
error_message,
))
}
"ResourceNotFoundException" => {
return DisassociateDRTLogBucketError::ResourceNotFound(String::from(
error_message,
))
}
"ValidationException" => {
return DisassociateDRTLogBucketError::Validation(error_message.to_string())
}
_ => {}
}
}
return DisassociateDRTLogBucketError::Unknown(res);
}
}
impl From<serde_json::error::Error> for DisassociateDRTLogBucketError {
fn from(err: serde_json::error::Error) -> DisassociateDRTLogBucketError {
DisassociateDRTLogBucketError::ParseError(err.description().to_string())
}
}
impl From<CredentialsError> for DisassociateDRTLogBucketError {
fn from(err: CredentialsError) -> DisassociateDRTLogBucketError {
DisassociateDRTLogBucketError::Credentials(err)
}
}
impl From<HttpDispatchError> for DisassociateDRTLogBucketError {
fn from(err: HttpDispatchError) -> DisassociateDRTLogBucketError {
DisassociateDRTLogBucketError::HttpDispatch(err)
}
}
impl From<io::Error> for DisassociateDRTLogBucketError {
fn from(err: io::Error) -> DisassociateDRTLogBucketError {
DisassociateDRTLogBucketError::HttpDispatch(HttpDispatchError::from(err))
}
}
impl fmt::Display for DisassociateDRTLogBucketError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for DisassociateDRTLogBucketError {
fn description(&self) -> &str {
match *self {
DisassociateDRTLogBucketError::AccessDeniedForDependency(ref cause) => cause,
DisassociateDRTLogBucketError::InternalError(ref cause) => cause,
DisassociateDRTLogBucketError::InvalidOperation(ref cause) => cause,
DisassociateDRTLogBucketError::NoAssociatedRole(ref cause) => cause,
DisassociateDRTLogBucketError::OptimisticLock(ref cause) => cause,
DisassociateDRTLogBucketError::ResourceNotFound(ref cause) => cause,
DisassociateDRTLogBucketError::Validation(ref cause) => cause,
DisassociateDRTLogBucketError::Credentials(ref err) => err.description(),
DisassociateDRTLogBucketError::HttpDispatch(ref dispatch_error) => {
dispatch_error.description()
}
DisassociateDRTLogBucketError::ParseError(ref cause) => cause,
DisassociateDRTLogBucketError::Unknown(_) => "unknown error",
}
}
}
/// Errors returned by DisassociateDRTRole
#[derive(Debug, PartialEq)]
pub enum DisassociateDRTRoleError {
/// <p>Exception that indicates that a problem occurred with the service infrastructure. You can retry the request.</p>
InternalError(String),
/// <p>Exception that indicates that the operation would not cause any change to occur.</p>
InvalidOperation(String),
/// <p>Exception that indicates that the protection state has been modified by another client. You can retry the request.</p>
OptimisticLock(String),
/// <p>Exception indicating the specified resource does not exist.</p>
ResourceNotFound(String),
/// An error occurred dispatching the HTTP request
HttpDispatch(HttpDispatchError),
/// An error was encountered with AWS credentials.
Credentials(CredentialsError),
/// A validation error occurred. Details from AWS are provided.
Validation(String),
/// An error occurred parsing the response payload.
ParseError(String),
/// An unknown error occurred. The raw HTTP response is provided.
Unknown(BufferedHttpResponse),
}
impl DisassociateDRTRoleError {
pub fn from_response(res: BufferedHttpResponse) -> DisassociateDRTRoleError {
if let Ok(json) = from_slice::<SerdeJsonValue>(&res.body) {
let raw_error_type = json
.get("__type")
.and_then(|e| e.as_str())
.unwrap_or("Unknown");
let error_message = json.get("message").and_then(|m| m.as_str()).unwrap_or("");
let pieces: Vec<&str> = raw_error_type.split("#").collect();
let error_type = pieces.last().expect("Expected error type");
match *error_type {
"InternalErrorException" => {
return DisassociateDRTRoleError::InternalError(String::from(error_message))
}
"InvalidOperationException" => {
return DisassociateDRTRoleError::InvalidOperation(String::from(error_message))
}
"OptimisticLockException" => {
return DisassociateDRTRoleError::OptimisticLock(String::from(error_message))
}
"ResourceNotFoundException" => {
return DisassociateDRTRoleError::ResourceNotFound(String::from(error_message))
}
"ValidationException" => {
return DisassociateDRTRoleError::Validation(error_message.to_string())
}
_ => {}
}
}
return DisassociateDRTRoleError::Unknown(res);
}
}
impl From<serde_json::error::Error> for DisassociateDRTRoleError {
fn from(err: serde_json::error::Error) -> DisassociateDRTRoleError {
DisassociateDRTRoleError::ParseError(err.description().to_string())
}
}
impl From<CredentialsError> for DisassociateDRTRoleError {
fn from(err: CredentialsError) -> DisassociateDRTRoleError {
DisassociateDRTRoleError::Credentials(err)
}
}
impl From<HttpDispatchError> for DisassociateDRTRoleError {
fn from(err: HttpDispatchError) -> DisassociateDRTRoleError {
DisassociateDRTRoleError::HttpDispatch(err)
}
}
impl From<io::Error> for DisassociateDRTRoleError {
fn from(err: io::Error) -> DisassociateDRTRoleError {
DisassociateDRTRoleError::HttpDispatch(HttpDispatchError::from(err))
}
}
impl fmt::Display for DisassociateDRTRoleError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for DisassociateDRTRoleError {
fn description(&self) -> &str {
match *self {
DisassociateDRTRoleError::InternalError(ref cause) => cause,
DisassociateDRTRoleError::InvalidOperation(ref cause) => cause,
DisassociateDRTRoleError::OptimisticLock(ref cause) => cause,
DisassociateDRTRoleError::ResourceNotFound(ref cause) => cause,
DisassociateDRTRoleError::Validation(ref cause) => cause,
DisassociateDRTRoleError::Credentials(ref err) => err.description(),
DisassociateDRTRoleError::HttpDispatch(ref dispatch_error) => {
dispatch_error.description()
}
DisassociateDRTRoleError::ParseError(ref cause) => cause,
DisassociateDRTRoleError::Unknown(_) => "unknown error",
}
}
}
/// Errors returned by GetSubscriptionState
#[derive(Debug, PartialEq)]
pub enum GetSubscriptionStateError {
/// <p>Exception that indicates that a problem occurred with the service infrastructure. You can retry the request.</p>
InternalError(String),
/// An error occurred dispatching the HTTP request
HttpDispatch(HttpDispatchError),
/// An error was encountered with AWS credentials.
Credentials(CredentialsError),
/// A validation error occurred. Details from AWS are provided.
Validation(String),
/// An error occurred parsing the response payload.
ParseError(String),
/// An unknown error occurred. The raw HTTP response is provided.
Unknown(BufferedHttpResponse),
}
impl GetSubscriptionStateError {
pub fn from_response(res: BufferedHttpResponse) -> GetSubscriptionStateError {
if let Ok(json) = from_slice::<SerdeJsonValue>(&res.body) {
let raw_error_type = json
.get("__type")
.and_then(|e| e.as_str())
.unwrap_or("Unknown");
let error_message = json.get("message").and_then(|m| m.as_str()).unwrap_or("");
let pieces: Vec<&str> = raw_error_type.split("#").collect();
let error_type = pieces.last().expect("Expected error type");
match *error_type {
"InternalErrorException" => {
return GetSubscriptionStateError::InternalError(String::from(error_message))
}
"ValidationException" => {
return GetSubscriptionStateError::Validation(error_message.to_string())
}
_ => {}
}
}
return GetSubscriptionStateError::Unknown(res);
}
}
impl From<serde_json::error::Error> for GetSubscriptionStateError {
fn from(err: serde_json::error::Error) -> GetSubscriptionStateError {
GetSubscriptionStateError::ParseError(err.description().to_string())
}
}
impl From<CredentialsError> for GetSubscriptionStateError {
fn from(err: CredentialsError) -> GetSubscriptionStateError {
GetSubscriptionStateError::Credentials(err)
}
}
impl From<HttpDispatchError> for GetSubscriptionStateError {
fn from(err: HttpDispatchError) -> GetSubscriptionStateError {
GetSubscriptionStateError::HttpDispatch(err)
}
}
impl From<io::Error> for GetSubscriptionStateError {
fn from(err: io::Error) -> GetSubscriptionStateError {
GetSubscriptionStateError::HttpDispatch(HttpDispatchError::from(err))
}
}
impl fmt::Display for GetSubscriptionStateError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for GetSubscriptionStateError {
fn description(&self) -> &str {
match *self {
GetSubscriptionStateError::InternalError(ref cause) => cause,
GetSubscriptionStateError::Validation(ref cause) => cause,
GetSubscriptionStateError::Credentials(ref err) => err.description(),
GetSubscriptionStateError::HttpDispatch(ref dispatch_error) => {
dispatch_error.description()
}
GetSubscriptionStateError::ParseError(ref cause) => cause,
GetSubscriptionStateError::Unknown(_) => "unknown error",
}
}
}
/// Errors returned by ListAttacks
#[derive(Debug, PartialEq)]
pub enum ListAttacksError {
/// <p>Exception that indicates that a problem occurred with the service infrastructure. You can retry the request.</p>
InternalError(String),
/// <p>Exception that indicates that the operation would not cause any change to occur.</p>
InvalidOperation(String),
/// <p>Exception that indicates that the parameters passed to the API are invalid. </p>
InvalidParameter(String),
/// An error occurred dispatching the HTTP request
HttpDispatch(HttpDispatchError),
/// An error was encountered with AWS credentials.
Credentials(CredentialsError),
/// A validation error occurred. Details from AWS are provided.
Validation(String),
/// An error occurred parsing the response payload.
ParseError(String),
/// An unknown error occurred. The raw HTTP response is provided.
Unknown(BufferedHttpResponse),
}
impl ListAttacksError {
pub fn from_response(res: BufferedHttpResponse) -> ListAttacksError {
if let Ok(json) = from_slice::<SerdeJsonValue>(&res.body) {
let raw_error_type = json
.get("__type")
.and_then(|e| e.as_str())
.unwrap_or("Unknown");
let error_message = json.get("message").and_then(|m| m.as_str()).unwrap_or("");
let pieces: Vec<&str> = raw_error_type.split("#").collect();
let error_type = pieces.last().expect("Expected error type");
match *error_type {
"InternalErrorException" => {
return ListAttacksError::InternalError(String::from(error_message))
}
"InvalidOperationException" => {
return ListAttacksError::InvalidOperation(String::from(error_message))
}
"InvalidParameterException" => {
return ListAttacksError::InvalidParameter(String::from(error_message))
}
"ValidationException" => {
return ListAttacksError::Validation(error_message.to_string())
}
_ => {}
}
}
return ListAttacksError::Unknown(res);
}
}
impl From<serde_json::error::Error> for ListAttacksError {
fn from(err: serde_json::error::Error) -> ListAttacksError {
ListAttacksError::ParseError(err.description().to_string())
}
}
impl From<CredentialsError> for ListAttacksError {
fn from(err: CredentialsError) -> ListAttacksError {
ListAttacksError::Credentials(err)
}
}
impl From<HttpDispatchError> for ListAttacksError {
fn from(err: HttpDispatchError) -> ListAttacksError {
ListAttacksError::HttpDispatch(err)
}
}
impl From<io::Error> for ListAttacksError {
fn from(err: io::Error) -> ListAttacksError {
ListAttacksError::HttpDispatch(HttpDispatchError::from(err))
}
}
impl fmt::Display for ListAttacksError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for ListAttacksError {
fn description(&self) -> &str {
match *self {
ListAttacksError::InternalError(ref cause) => cause,
ListAttacksError::InvalidOperation(ref cause) => cause,
ListAttacksError::InvalidParameter(ref cause) => cause,
ListAttacksError::Validation(ref cause) => cause,
ListAttacksError::Credentials(ref err) => err.description(),
ListAttacksError::HttpDispatch(ref dispatch_error) => dispatch_error.description(),
ListAttacksError::ParseError(ref cause) => cause,
ListAttacksError::Unknown(_) => "unknown error",
}
}
}
/// Errors returned by ListProtections
#[derive(Debug, PartialEq)]
pub enum ListProtectionsError {
/// <p>Exception that indicates that a problem occurred with the service infrastructure. You can retry the request.</p>
InternalError(String),
/// <p>Exception that indicates that the NextToken specified in the request is invalid. Submit the request using the NextToken value that was returned in the response.</p>
InvalidPaginationToken(String),
/// <p>Exception indicating the specified resource does not exist.</p>
ResourceNotFound(String),
/// An error occurred dispatching the HTTP request
HttpDispatch(HttpDispatchError),
/// An error was encountered with AWS credentials.
Credentials(CredentialsError),
/// A validation error occurred. Details from AWS are provided.
Validation(String),
/// An error occurred parsing the response payload.
ParseError(String),
/// An unknown error occurred. The raw HTTP response is provided.
Unknown(BufferedHttpResponse),
}
impl ListProtectionsError {
pub fn from_response(res: BufferedHttpResponse) -> ListProtectionsError {
if let Ok(json) = from_slice::<SerdeJsonValue>(&res.body) {
let raw_error_type = json
.get("__type")
.and_then(|e| e.as_str())
.unwrap_or("Unknown");
let error_message = json.get("message").and_then(|m| m.as_str()).unwrap_or("");
let pieces: Vec<&str> = raw_error_type.split("#").collect();
let error_type = pieces.last().expect("Expected error type");
match *error_type {
"InternalErrorException" => {
return ListProtectionsError::InternalError(String::from(error_message))
}
"InvalidPaginationTokenException" => {
return ListProtectionsError::InvalidPaginationToken(String::from(error_message))
}
"ResourceNotFoundException" => {
return ListProtectionsError::ResourceNotFound(String::from(error_message))
}
"ValidationException" => {
return ListProtectionsError::Validation(error_message.to_string())
}
_ => {}
}
}
return ListProtectionsError::Unknown(res);
}
}
impl From<serde_json::error::Error> for ListProtectionsError {
fn from(err: serde_json::error::Error) -> ListProtectionsError {
ListProtectionsError::ParseError(err.description().to_string())
}
}
impl From<CredentialsError> for ListProtectionsError {
fn from(err: CredentialsError) -> ListProtectionsError {
ListProtectionsError::Credentials(err)
}
}
impl From<HttpDispatchError> for ListProtectionsError {
fn from(err: HttpDispatchError) -> ListProtectionsError {
ListProtectionsError::HttpDispatch(err)
}
}
impl From<io::Error> for ListProtectionsError {
fn from(err: io::Error) -> ListProtectionsError {
ListProtectionsError::HttpDispatch(HttpDispatchError::from(err))
}
}
impl fmt::Display for ListProtectionsError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for ListProtectionsError {
fn description(&self) -> &str {
match *self {
ListProtectionsError::InternalError(ref cause) => cause,
ListProtectionsError::InvalidPaginationToken(ref cause) => cause,
ListProtectionsError::ResourceNotFound(ref cause) => cause,
ListProtectionsError::Validation(ref cause) => cause,
ListProtectionsError::Credentials(ref err) => err.description(),
ListProtectionsError::HttpDispatch(ref dispatch_error) => dispatch_error.description(),
ListProtectionsError::ParseError(ref cause) => cause,
ListProtectionsError::Unknown(_) => "unknown error",
}
}
}
/// Errors returned by UpdateEmergencyContactSettings
#[derive(Debug, PartialEq)]
pub enum UpdateEmergencyContactSettingsError {
/// <p>Exception that indicates that a problem occurred with the service infrastructure. You can retry the request.</p>
InternalError(String),
/// <p>Exception that indicates that the parameters passed to the API are invalid. </p>
InvalidParameter(String),
/// <p>Exception that indicates that the protection state has been modified by another client. You can retry the request.</p>
OptimisticLock(String),
/// <p>Exception indicating the specified resource does not exist.</p>
ResourceNotFound(String),
/// An error occurred dispatching the HTTP request
HttpDispatch(HttpDispatchError),
/// An error was encountered with AWS credentials.
Credentials(CredentialsError),
/// A validation error occurred. Details from AWS are provided.
Validation(String),
/// An error occurred parsing the response payload.
ParseError(String),
/// An unknown error occurred. The raw HTTP response is provided.
Unknown(BufferedHttpResponse),
}
impl UpdateEmergencyContactSettingsError {
pub fn from_response(res: BufferedHttpResponse) -> UpdateEmergencyContactSettingsError {
if let Ok(json) = from_slice::<SerdeJsonValue>(&res.body) {
let raw_error_type = json
.get("__type")
.and_then(|e| e.as_str())
.unwrap_or("Unknown");
let error_message = json.get("message").and_then(|m| m.as_str()).unwrap_or("");
let pieces: Vec<&str> = raw_error_type.split("#").collect();
let error_type = pieces.last().expect("Expected error type");
match *error_type {
"InternalErrorException" => {
return UpdateEmergencyContactSettingsError::InternalError(String::from(
error_message,
))
}
"InvalidParameterException" => {
return UpdateEmergencyContactSettingsError::InvalidParameter(String::from(
error_message,
))
}
"OptimisticLockException" => {
return UpdateEmergencyContactSettingsError::OptimisticLock(String::from(
error_message,
))
}
"ResourceNotFoundException" => {
return UpdateEmergencyContactSettingsError::ResourceNotFound(String::from(
error_message,
))
}
"ValidationException" => {
return UpdateEmergencyContactSettingsError::Validation(
error_message.to_string(),
)
}
_ => {}
}
}
return UpdateEmergencyContactSettingsError::Unknown(res);
}
}
impl From<serde_json::error::Error> for UpdateEmergencyContactSettingsError {
fn from(err: serde_json::error::Error) -> UpdateEmergencyContactSettingsError {
UpdateEmergencyContactSettingsError::ParseError(err.description().to_string())
}
}
impl From<CredentialsError> for UpdateEmergencyContactSettingsError {
fn from(err: CredentialsError) -> UpdateEmergencyContactSettingsError {
UpdateEmergencyContactSettingsError::Credentials(err)
}
}
impl From<HttpDispatchError> for UpdateEmergencyContactSettingsError {
fn from(err: HttpDispatchError) -> UpdateEmergencyContactSettingsError {
UpdateEmergencyContactSettingsError::HttpDispatch(err)
}
}
impl From<io::Error> for UpdateEmergencyContactSettingsError {
fn from(err: io::Error) -> UpdateEmergencyContactSettingsError {
UpdateEmergencyContactSettingsError::HttpDispatch(HttpDispatchError::from(err))
}
}
impl fmt::Display for UpdateEmergencyContactSettingsError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for UpdateEmergencyContactSettingsError {
fn description(&self) -> &str {
match *self {
UpdateEmergencyContactSettingsError::InternalError(ref cause) => cause,
UpdateEmergencyContactSettingsError::InvalidParameter(ref cause) => cause,
UpdateEmergencyContactSettingsError::OptimisticLock(ref cause) => cause,
UpdateEmergencyContactSettingsError::ResourceNotFound(ref cause) => cause,
UpdateEmergencyContactSettingsError::Validation(ref cause) => cause,
UpdateEmergencyContactSettingsError::Credentials(ref err) => err.description(),
UpdateEmergencyContactSettingsError::HttpDispatch(ref dispatch_error) => {
dispatch_error.description()
}
UpdateEmergencyContactSettingsError::ParseError(ref cause) => cause,
UpdateEmergencyContactSettingsError::Unknown(_) => "unknown error",
}
}
}
/// Errors returned by UpdateSubscription
#[derive(Debug, PartialEq)]
pub enum UpdateSubscriptionError {
/// <p>Exception that indicates that a problem occurred with the service infrastructure. You can retry the request.</p>
InternalError(String),
/// <p>Exception that indicates that the parameters passed to the API are invalid. </p>
InvalidParameter(String),
/// <p>You are trying to update a subscription that has not yet completed the 1-year commitment. You can change the <code>AutoRenew</code> parameter during the last 30 days of your subscription. This exception indicates that you are attempting to change <code>AutoRenew</code> prior to that period.</p>
LockedSubscription(String),
/// <p>Exception that indicates that the protection state has been modified by another client. You can retry the request.</p>
OptimisticLock(String),
/// <p>Exception indicating the specified resource does not exist.</p>
ResourceNotFound(String),
/// An error occurred dispatching the HTTP request
HttpDispatch(HttpDispatchError),
/// An error was encountered with AWS credentials.
Credentials(CredentialsError),
/// A validation error occurred. Details from AWS are provided.
Validation(String),
/// An error occurred parsing the response payload.
ParseError(String),
/// An unknown error occurred. The raw HTTP response is provided.
Unknown(BufferedHttpResponse),
}
impl UpdateSubscriptionError {
pub fn from_response(res: BufferedHttpResponse) -> UpdateSubscriptionError {
if let Ok(json) = from_slice::<SerdeJsonValue>(&res.body) {
let raw_error_type = json
.get("__type")
.and_then(|e| e.as_str())
.unwrap_or("Unknown");
let error_message = json.get("message").and_then(|m| m.as_str()).unwrap_or("");
let pieces: Vec<&str> = raw_error_type.split("#").collect();
let error_type = pieces.last().expect("Expected error type");
match *error_type {
"InternalErrorException" => {
return UpdateSubscriptionError::InternalError(String::from(error_message))
}
"InvalidParameterException" => {
return UpdateSubscriptionError::InvalidParameter(String::from(error_message))
}
"LockedSubscriptionException" => {
return UpdateSubscriptionError::LockedSubscription(String::from(error_message))
}
"OptimisticLockException" => {
return UpdateSubscriptionError::OptimisticLock(String::from(error_message))
}
"ResourceNotFoundException" => {
return UpdateSubscriptionError::ResourceNotFound(String::from(error_message))
}
"ValidationException" => {
return UpdateSubscriptionError::Validation(error_message.to_string())
}
_ => {}
}
}
return UpdateSubscriptionError::Unknown(res);
}
}
impl From<serde_json::error::Error> for UpdateSubscriptionError {
fn from(err: serde_json::error::Error) -> UpdateSubscriptionError {
UpdateSubscriptionError::ParseError(err.description().to_string())
}
}
impl From<CredentialsError> for UpdateSubscriptionError {
fn from(err: CredentialsError) -> UpdateSubscriptionError {
UpdateSubscriptionError::Credentials(err)
}
}
impl From<HttpDispatchError> for UpdateSubscriptionError {
fn from(err: HttpDispatchError) -> UpdateSubscriptionError {
UpdateSubscriptionError::HttpDispatch(err)
}
}
impl From<io::Error> for UpdateSubscriptionError {
fn from(err: io::Error) -> UpdateSubscriptionError {
UpdateSubscriptionError::HttpDispatch(HttpDispatchError::from(err))
}
}
impl fmt::Display for UpdateSubscriptionError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for UpdateSubscriptionError {
fn description(&self) -> &str {
match *self {
UpdateSubscriptionError::InternalError(ref cause) => cause,
UpdateSubscriptionError::InvalidParameter(ref cause) => cause,
UpdateSubscriptionError::LockedSubscription(ref cause) => cause,
UpdateSubscriptionError::OptimisticLock(ref cause) => cause,
UpdateSubscriptionError::ResourceNotFound(ref cause) => cause,
UpdateSubscriptionError::Validation(ref cause) => cause,
UpdateSubscriptionError::Credentials(ref err) => err.description(),
UpdateSubscriptionError::HttpDispatch(ref dispatch_error) => {
dispatch_error.description()
}
UpdateSubscriptionError::ParseError(ref cause) => cause,
UpdateSubscriptionError::Unknown(_) => "unknown error",
}
}
}
/// Trait representing the capabilities of the AWS Shield API. AWS Shield clients implement this trait.
pub trait Shield {
/// <p>Authorizes the DDoS Response team (DRT) to access the specified Amazon S3 bucket containing your flow logs. You can associate up to 10 Amazon S3 buckets with your subscription.</p> <p>To use the services of the DRT and make an <code>AssociateDRTLogBucket</code> request, you must be subscribed to the <a href="https://aws.amazon.com/premiumsupport/business-support/">Business Support plan</a> or the <a href="https://aws.amazon.com/premiumsupport/enterprise-support/">Enterprise Support plan</a>.</p>
fn associate_drt_log_bucket(
&self,
input: AssociateDRTLogBucketRequest,
) -> RusotoFuture<AssociateDRTLogBucketResponse, AssociateDRTLogBucketError>;
/// <p>Authorizes the DDoS Response team (DRT), using the specified role, to access your AWS account to assist with DDoS attack mitigation during potential attacks. This enables the DRT to inspect your AWS WAF configuration and create or update AWS WAF rules and web ACLs.</p> <p>You can associate only one <code>RoleArn</code> with your subscription. If you submit an <code>AssociateDRTRole</code> request for an account that already has an associated role, the new <code>RoleArn</code> will replace the existing <code>RoleArn</code>. </p> <p>Prior to making the <code>AssociateDRTRole</code> request, you must attach the <a href="https://console.aws.amazon.com/iam/home?#/policies/arn:aws:iam::aws:policy/service-role/AWSShieldDRTAccessPolicy">AWSShieldDRTAccessPolicy</a> managed policy to the role you will specify in the request. For more information see <a href=" https://docs.aws.amazon.com/IAM/latest/UserGuide/access_policies_manage-attach-detach.html">Attaching and Detaching IAM Policies</a>. The role must also trust the service principal <code> drt.shield.amazonaws.com</code>. For more information, see <a href="https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_elements_principal.html">IAM JSON Policy Elements: Principal</a>.</p> <p>The DRT will have access only to your AWS WAF and Shield resources. By submitting this request, you authorize the DRT to inspect your AWS WAF and Shield configuration and create and update AWS WAF rules and web ACLs on your behalf. The DRT takes these actions only if explicitly authorized by you.</p> <p>You must have the <code>iam:PassRole</code> permission to make an <code>AssociateDRTRole</code> request. For more information, see <a href="https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use_passrole.html">Granting a User Permissions to Pass a Role to an AWS Service</a>. </p> <p>To use the services of the DRT and make an <code>AssociateDRTRole</code> request, you must be subscribed to the <a href="https://aws.amazon.com/premiumsupport/business-support/">Business Support plan</a> or the <a href="https://aws.amazon.com/premiumsupport/enterprise-support/">Enterprise Support plan</a>.</p>
fn associate_drt_role(
&self,
input: AssociateDRTRoleRequest,
) -> RusotoFuture<AssociateDRTRoleResponse, AssociateDRTRoleError>;
/// <p>Enables AWS Shield Advanced for a specific AWS resource. The resource can be an Amazon CloudFront distribution, Elastic Load Balancing load balancer, Elastic IP Address, or an Amazon Route 53 hosted zone.</p> <p>You can add protection to only a single resource with each CreateProtection request. If you want to add protection to multiple resources at once, use the <a href="https://console.aws.amazon.com/waf/">AWS WAF console</a>. For more information see <a href="https://docs.aws.amazon.com/waf/latest/developerguide/getting-started-ddos.html">Getting Started with AWS Shield Advanced</a> and <a href="https://docs.aws.amazon.com/waf/latest/developerguide/configure-new-protection.html">Add AWS Shield Advanced Protection to more AWS Resources</a>.</p>
fn create_protection(
&self,
input: CreateProtectionRequest,
) -> RusotoFuture<CreateProtectionResponse, CreateProtectionError>;
    /// <p>Activates AWS Shield Advanced for an account.</p> <p>As part of this request you can specify <code>EmergencySettings</code> that automatically grant the DDoS response team (DRT) needed permissions to assist you during a suspected DDoS attack. For more information see <a href="https://docs.aws.amazon.com/waf/latest/developerguide/authorize-DRT.html">Authorize the DDoS Response Team to Create Rules and Web ACLs on Your Behalf</a>.</p> <p>When you initially create a subscription, your subscription is set to be automatically renewed at the end of the existing subscription period. You can change this by submitting an <code>UpdateSubscription</code> request. </p>
fn create_subscription(
&self,
) -> RusotoFuture<CreateSubscriptionResponse, CreateSubscriptionError>;
/// <p>Deletes an AWS Shield Advanced <a>Protection</a>.</p>
fn delete_protection(
&self,
input: DeleteProtectionRequest,
) -> RusotoFuture<DeleteProtectionResponse, DeleteProtectionError>;
/// <p>Removes AWS Shield Advanced from an account. AWS Shield Advanced requires a 1-year subscription commitment. You cannot delete a subscription prior to the completion of that commitment. </p>
fn delete_subscription(
&self,
) -> RusotoFuture<DeleteSubscriptionResponse, DeleteSubscriptionError>;
/// <p>Describes the details of a DDoS attack. </p>
fn describe_attack(
&self,
input: DescribeAttackRequest,
) -> RusotoFuture<DescribeAttackResponse, DescribeAttackError>;
/// <p>Returns the current role and list of Amazon S3 log buckets used by the DDoS Response team (DRT) to access your AWS account while assisting with attack mitigation.</p>
fn describe_drt_access(
&self,
) -> RusotoFuture<DescribeDRTAccessResponse, DescribeDRTAccessError>;
/// <p>Lists the email addresses that the DRT can use to contact you during a suspected attack.</p>
fn describe_emergency_contact_settings(
&self,
) -> RusotoFuture<DescribeEmergencyContactSettingsResponse, DescribeEmergencyContactSettingsError>;
/// <p>Lists the details of a <a>Protection</a> object.</p>
fn describe_protection(
&self,
input: DescribeProtectionRequest,
) -> RusotoFuture<DescribeProtectionResponse, DescribeProtectionError>;
/// <p>Provides details about the AWS Shield Advanced subscription for an account.</p>
fn describe_subscription(
&self,
) -> RusotoFuture<DescribeSubscriptionResponse, DescribeSubscriptionError>;
/// <p>Removes the DDoS Response team's (DRT) access to the specified Amazon S3 bucket containing your flow logs.</p> <p>To make a <code>DisassociateDRTLogBucket</code> request, you must be subscribed to the <a href="https://aws.amazon.com/premiumsupport/business-support/">Business Support plan</a> or the <a href="https://aws.amazon.com/premiumsupport/enterprise-support/">Enterprise Support plan</a>. However, if you are not subscribed to one of these support plans, but had been previously and had granted the DRT access to your account, you can submit a <code>DisassociateDRTLogBucket</code> request to remove this access.</p>
fn disassociate_drt_log_bucket(
&self,
input: DisassociateDRTLogBucketRequest,
) -> RusotoFuture<DisassociateDRTLogBucketResponse, DisassociateDRTLogBucketError>;
/// <p>Removes the DDoS Response team's (DRT) access to your AWS account.</p> <p>To make a <code>DisassociateDRTRole</code> request, you must be subscribed to the <a href="https://aws.amazon.com/premiumsupport/business-support/">Business Support plan</a> or the <a href="https://aws.amazon.com/premiumsupport/enterprise-support/">Enterprise Support plan</a>. However, if you are not subscribed to one of these support plans, but had been previously and had granted the DRT access to your account, you can submit a <code>DisassociateDRTRole</code> request to remove this access.</p>
fn disassociate_drt_role(
&self,
) -> RusotoFuture<DisassociateDRTRoleResponse, DisassociateDRTRoleError>;
/// <p>Returns the <code>SubscriptionState</code>, either <code>Active</code> or <code>Inactive</code>.</p>
fn get_subscription_state(
&self,
) -> RusotoFuture<GetSubscriptionStateResponse, GetSubscriptionStateError>;
/// <p>Returns all ongoing DDoS attacks or all DDoS attacks during a specified time period.</p>
fn list_attacks(
&self,
input: ListAttacksRequest,
) -> RusotoFuture<ListAttacksResponse, ListAttacksError>;
/// <p>Lists all <a>Protection</a> objects for the account.</p>
fn list_protections(
&self,
input: ListProtectionsRequest,
) -> RusotoFuture<ListProtectionsResponse, ListProtectionsError>;
/// <p>Updates the details of the list of email addresses that the DRT can use to contact you during a suspected attack.</p>
fn update_emergency_contact_settings(
&self,
input: UpdateEmergencyContactSettingsRequest,
) -> RusotoFuture<UpdateEmergencyContactSettingsResponse, UpdateEmergencyContactSettingsError>;
/// <p>Updates the details of an existing subscription. Only enter values for parameters you want to change. Empty parameters are not updated.</p>
fn update_subscription(
&self,
input: UpdateSubscriptionRequest,
) -> RusotoFuture<UpdateSubscriptionResponse, UpdateSubscriptionError>;
}
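/// A minimal usage sketch (illustrative only, not part of the generated API).
/// Staying generic over the `Shield` trait keeps call sites easy to test with
/// a mock implementation; `RusotoFuture::sync` blocks the current thread until
/// the request resolves.
#[allow(dead_code)]
fn print_subscription_state<S: Shield>(shield: &S) {
    match shield.get_subscription_state().sync() {
        Ok(response) => println!("subscription state: {}", response.subscription_state),
        Err(err) => eprintln!("GetSubscriptionState failed: {}", err),
    }
}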
/// A client for the AWS Shield API.
pub struct ShieldClient {
client: Client,
region: region::Region,
}
impl ShieldClient {
/// Creates a client backed by the default tokio event loop.
///
/// The client will use the default credentials provider and tls client.
pub fn new(region: region::Region) -> ShieldClient {
ShieldClient {
client: Client::shared(),
            region,
}
}
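    /// Creates a client with a custom request dispatcher and credentials
    /// provider, e.g. for non-default TLS settings or for injecting a mock
    /// dispatcher in tests.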
pub fn new_with<P, D>(
request_dispatcher: D,
credentials_provider: P,
region: region::Region,
) -> ShieldClient
where
P: ProvideAwsCredentials + Send + Sync + 'static,
P::Future: Send,
D: DispatchSignedRequest + Send + Sync + 'static,
D::Future: Send,
{
ShieldClient {
client: Client::new_with(credentials_provider, request_dispatcher),
            region,
}
}
}
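// A brief construction sketch (example values only): the common case is a
// one-liner backed by the shared default dispatcher and credential chain.
//
//     let client = ShieldClient::new(region::Region::UsEast1);
//     let state = client.get_subscription_state().sync();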
impl Shield for ShieldClient {
/// <p>Authorizes the DDoS Response team (DRT) to access the specified Amazon S3 bucket containing your flow logs. You can associate up to 10 Amazon S3 buckets with your subscription.</p> <p>To use the services of the DRT and make an <code>AssociateDRTLogBucket</code> request, you must be subscribed to the <a href="https://aws.amazon.com/premiumsupport/business-support/">Business Support plan</a> or the <a href="https://aws.amazon.com/premiumsupport/enterprise-support/">Enterprise Support plan</a>.</p>
fn associate_drt_log_bucket(
&self,
input: AssociateDRTLogBucketRequest,
) -> RusotoFuture<AssociateDRTLogBucketResponse, AssociateDRTLogBucketError> {
let mut request = SignedRequest::new("POST", "shield", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSShield_20160616.AssociateDRTLogBucket");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded.into_bytes()));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().map(|response| {
let mut body = response.body;
if body.is_empty() || body == b"null" {
body = b"{}".to_vec();
}
serde_json::from_str::<AssociateDRTLogBucketResponse>(
String::from_utf8_lossy(body.as_ref()).as_ref(),
)
.unwrap()
}))
} else {
Box::new(
response.buffer().from_err().and_then(|response| {
Err(AssociateDRTLogBucketError::from_response(response))
}),
)
}
})
}
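    // Every operation in this impl follows the shape above: build a
    // `SignedRequest` against the `shield` service, set the JSON-1.1 content
    // type plus an `x-amz-target` header naming the operation, serialize the
    // input (or send `{}` for input-less calls), and dispatch. On success the
    // body is deserialized, with an empty or literal `null` body coerced to
    // `{}` first; on failure the buffered response is handed to the matching
    // `from_response` constructor.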
/// <p>Authorizes the DDoS Response team (DRT), using the specified role, to access your AWS account to assist with DDoS attack mitigation during potential attacks. This enables the DRT to inspect your AWS WAF configuration and create or update AWS WAF rules and web ACLs.</p> <p>You can associate only one <code>RoleArn</code> with your subscription. If you submit an <code>AssociateDRTRole</code> request for an account that already has an associated role, the new <code>RoleArn</code> will replace the existing <code>RoleArn</code>. </p> <p>Prior to making the <code>AssociateDRTRole</code> request, you must attach the <a href="https://console.aws.amazon.com/iam/home?#/policies/arn:aws:iam::aws:policy/service-role/AWSShieldDRTAccessPolicy">AWSShieldDRTAccessPolicy</a> managed policy to the role you will specify in the request. For more information see <a href=" https://docs.aws.amazon.com/IAM/latest/UserGuide/access_policies_manage-attach-detach.html">Attaching and Detaching IAM Policies</a>. The role must also trust the service principal <code> drt.shield.amazonaws.com</code>. For more information, see <a href="https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_elements_principal.html">IAM JSON Policy Elements: Principal</a>.</p> <p>The DRT will have access only to your AWS WAF and Shield resources. By submitting this request, you authorize the DRT to inspect your AWS WAF and Shield configuration and create and update AWS WAF rules and web ACLs on your behalf. The DRT takes these actions only if explicitly authorized by you.</p> <p>You must have the <code>iam:PassRole</code> permission to make an <code>AssociateDRTRole</code> request. For more information, see <a href="https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use_passrole.html">Granting a User Permissions to Pass a Role to an AWS Service</a>. </p> <p>To use the services of the DRT and make an <code>AssociateDRTRole</code> request, you must be subscribed to the <a href="https://aws.amazon.com/premiumsupport/business-support/">Business Support plan</a> or the <a href="https://aws.amazon.com/premiumsupport/enterprise-support/">Enterprise Support plan</a>.</p>
fn associate_drt_role(
&self,
input: AssociateDRTRoleRequest,
) -> RusotoFuture<AssociateDRTRoleResponse, AssociateDRTRoleError> {
let mut request = SignedRequest::new("POST", "shield", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSShield_20160616.AssociateDRTRole");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded.into_bytes()));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().map(|response| {
let mut body = response.body;
if body.is_empty() || body == b"null" {
body = b"{}".to_vec();
}
serde_json::from_str::<AssociateDRTRoleResponse>(
String::from_utf8_lossy(body.as_ref()).as_ref(),
)
.unwrap()
}))
} else {
Box::new(
response
.buffer()
.from_err()
.and_then(|response| Err(AssociateDRTRoleError::from_response(response))),
)
}
})
}
/// <p>Enables AWS Shield Advanced for a specific AWS resource. The resource can be an Amazon CloudFront distribution, Elastic Load Balancing load balancer, Elastic IP Address, or an Amazon Route 53 hosted zone.</p> <p>You can add protection to only a single resource with each CreateProtection request. If you want to add protection to multiple resources at once, use the <a href="https://console.aws.amazon.com/waf/">AWS WAF console</a>. For more information see <a href="https://docs.aws.amazon.com/waf/latest/developerguide/getting-started-ddos.html">Getting Started with AWS Shield Advanced</a> and <a href="https://docs.aws.amazon.com/waf/latest/developerguide/configure-new-protection.html">Add AWS Shield Advanced Protection to more AWS Resources</a>.</p>
fn create_protection(
&self,
input: CreateProtectionRequest,
) -> RusotoFuture<CreateProtectionResponse, CreateProtectionError> {
let mut request = SignedRequest::new("POST", "shield", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSShield_20160616.CreateProtection");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded.into_bytes()));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().map(|response| {
let mut body = response.body;
if body.is_empty() || body == b"null" {
body = b"{}".to_vec();
}
serde_json::from_str::<CreateProtectionResponse>(
String::from_utf8_lossy(body.as_ref()).as_ref(),
)
.unwrap()
}))
} else {
Box::new(
response
.buffer()
.from_err()
.and_then(|response| Err(CreateProtectionError::from_response(response))),
)
}
})
}
    /// <p>Activates AWS Shield Advanced for an account.</p> <p>As part of this request you can specify <code>EmergencySettings</code> that automatically grant the DDoS response team (DRT) needed permissions to assist you during a suspected DDoS attack. For more information see <a href="https://docs.aws.amazon.com/waf/latest/developerguide/authorize-DRT.html">Authorize the DDoS Response Team to Create Rules and Web ACLs on Your Behalf</a>.</p> <p>When you initially create a subscription, your subscription is set to be automatically renewed at the end of the existing subscription period. You can change this by submitting an <code>UpdateSubscription</code> request. </p>
fn create_subscription(
&self,
) -> RusotoFuture<CreateSubscriptionResponse, CreateSubscriptionError> {
let mut request = SignedRequest::new("POST", "shield", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSShield_20160616.CreateSubscription");
request.set_payload(Some(b"{}".to_vec()));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().map(|response| {
let mut body = response.body;
if body.is_empty() || body == b"null" {
body = b"{}".to_vec();
}
serde_json::from_str::<CreateSubscriptionResponse>(
String::from_utf8_lossy(body.as_ref()).as_ref(),
)
.unwrap()
}))
} else {
Box::new(
response
.buffer()
.from_err()
.and_then(|response| Err(CreateSubscriptionError::from_response(response))),
)
}
})
}
/// <p>Deletes an AWS Shield Advanced <a>Protection</a>.</p>
fn delete_protection(
&self,
input: DeleteProtectionRequest,
) -> RusotoFuture<DeleteProtectionResponse, DeleteProtectionError> {
let mut request = SignedRequest::new("POST", "shield", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSShield_20160616.DeleteProtection");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded.into_bytes()));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().map(|response| {
let mut body = response.body;
if body.is_empty() || body == b"null" {
body = b"{}".to_vec();
}
serde_json::from_str::<DeleteProtectionResponse>(
String::from_utf8_lossy(body.as_ref()).as_ref(),
)
.unwrap()
}))
} else {
Box::new(
response
.buffer()
.from_err()
.and_then(|response| Err(DeleteProtectionError::from_response(response))),
)
}
})
}
/// <p>Removes AWS Shield Advanced from an account. AWS Shield Advanced requires a 1-year subscription commitment. You cannot delete a subscription prior to the completion of that commitment. </p>
fn delete_subscription(
&self,
) -> RusotoFuture<DeleteSubscriptionResponse, DeleteSubscriptionError> {
let mut request = SignedRequest::new("POST", "shield", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSShield_20160616.DeleteSubscription");
request.set_payload(Some(b"{}".to_vec()));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().map(|response| {
let mut body = response.body;
if body.is_empty() || body == b"null" {
body = b"{}".to_vec();
}
serde_json::from_str::<DeleteSubscriptionResponse>(
String::from_utf8_lossy(body.as_ref()).as_ref(),
)
.unwrap()
}))
} else {
Box::new(
response
.buffer()
.from_err()
.and_then(|response| Err(DeleteSubscriptionError::from_response(response))),
)
}
})
}
/// <p>Describes the details of a DDoS attack. </p>
fn describe_attack(
&self,
input: DescribeAttackRequest,
) -> RusotoFuture<DescribeAttackResponse, DescribeAttackError> {
let mut request = SignedRequest::new("POST", "shield", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSShield_20160616.DescribeAttack");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded.into_bytes()));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().map(|response| {
let mut body = response.body;
if body.is_empty() || body == b"null" {
body = b"{}".to_vec();
}
serde_json::from_str::<DescribeAttackResponse>(
String::from_utf8_lossy(body.as_ref()).as_ref(),
)
.unwrap()
}))
} else {
Box::new(
response
.buffer()
.from_err()
.and_then(|response| Err(DescribeAttackError::from_response(response))),
)
}
})
}
/// <p>Returns the current role and list of Amazon S3 log buckets used by the DDoS Response team (DRT) to access your AWS account while assisting with attack mitigation.</p>
fn describe_drt_access(
&self,
) -> RusotoFuture<DescribeDRTAccessResponse, DescribeDRTAccessError> {
let mut request = SignedRequest::new("POST", "shield", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSShield_20160616.DescribeDRTAccess");
request.set_payload(Some(b"{}".to_vec()));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().map(|response| {
let mut body = response.body;
if body.is_empty() || body == b"null" {
body = b"{}".to_vec();
}
serde_json::from_str::<DescribeDRTAccessResponse>(
String::from_utf8_lossy(body.as_ref()).as_ref(),
)
.unwrap()
}))
} else {
Box::new(
response
.buffer()
.from_err()
.and_then(|response| Err(DescribeDRTAccessError::from_response(response))),
)
}
})
}
/// <p>Lists the email addresses that the DRT can use to contact you during a suspected attack.</p>
fn describe_emergency_contact_settings(
&self,
) -> RusotoFuture<DescribeEmergencyContactSettingsResponse, DescribeEmergencyContactSettingsError>
{
let mut request = SignedRequest::new("POST", "shield", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header(
"x-amz-target",
"AWSShield_20160616.DescribeEmergencyContactSettings",
);
request.set_payload(Some(b"{}".to_vec()));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().map(|response| {
let mut body = response.body;
if body.is_empty() || body == b"null" {
body = b"{}".to_vec();
}
serde_json::from_str::<DescribeEmergencyContactSettingsResponse>(
String::from_utf8_lossy(body.as_ref()).as_ref(),
)
.unwrap()
}))
} else {
Box::new(response.buffer().from_err().and_then(|response| {
Err(DescribeEmergencyContactSettingsError::from_response(
response,
))
}))
}
})
}
/// <p>Lists the details of a <a>Protection</a> object.</p>
fn describe_protection(
&self,
input: DescribeProtectionRequest,
) -> RusotoFuture<DescribeProtectionResponse, DescribeProtectionError> {
let mut request = SignedRequest::new("POST", "shield", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSShield_20160616.DescribeProtection");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded.into_bytes()));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().map(|response| {
let mut body = response.body;
if body.is_empty() || body == b"null" {
body = b"{}".to_vec();
}
serde_json::from_str::<DescribeProtectionResponse>(
String::from_utf8_lossy(body.as_ref()).as_ref(),
)
.unwrap()
}))
} else {
Box::new(
response
.buffer()
.from_err()
.and_then(|response| Err(DescribeProtectionError::from_response(response))),
)
}
})
}
/// <p>Provides details about the AWS Shield Advanced subscription for an account.</p>
fn describe_subscription(
&self,
) -> RusotoFuture<DescribeSubscriptionResponse, DescribeSubscriptionError> {
let mut request = SignedRequest::new("POST", "shield", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSShield_20160616.DescribeSubscription");
request.set_payload(Some(b"{}".to_vec()));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().map(|response| {
let mut body = response.body;
if body.is_empty() || body == b"null" {
body = b"{}".to_vec();
}
serde_json::from_str::<DescribeSubscriptionResponse>(
String::from_utf8_lossy(body.as_ref()).as_ref(),
)
.unwrap()
}))
} else {
Box::new(
response.buffer().from_err().and_then(|response| {
Err(DescribeSubscriptionError::from_response(response))
}),
)
}
})
}
/// <p>Removes the DDoS Response team's (DRT) access to the specified Amazon S3 bucket containing your flow logs.</p> <p>To make a <code>DisassociateDRTLogBucket</code> request, you must be subscribed to the <a href="https://aws.amazon.com/premiumsupport/business-support/">Business Support plan</a> or the <a href="https://aws.amazon.com/premiumsupport/enterprise-support/">Enterprise Support plan</a>. However, if you are not subscribed to one of these support plans, but had been previously and had granted the DRT access to your account, you can submit a <code>DisassociateDRTLogBucket</code> request to remove this access.</p>
fn disassociate_drt_log_bucket(
&self,
input: DisassociateDRTLogBucketRequest,
) -> RusotoFuture<DisassociateDRTLogBucketResponse, DisassociateDRTLogBucketError> {
let mut request = SignedRequest::new("POST", "shield", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header(
"x-amz-target",
"AWSShield_20160616.DisassociateDRTLogBucket",
);
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded.into_bytes()));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().map(|response| {
let mut body = response.body;
if body.is_empty() || body == b"null" {
body = b"{}".to_vec();
}
serde_json::from_str::<DisassociateDRTLogBucketResponse>(
String::from_utf8_lossy(body.as_ref()).as_ref(),
)
.unwrap()
}))
} else {
Box::new(response.buffer().from_err().and_then(|response| {
Err(DisassociateDRTLogBucketError::from_response(response))
}))
}
})
}
/// <p>Removes the DDoS Response team's (DRT) access to your AWS account.</p> <p>To make a <code>DisassociateDRTRole</code> request, you must be subscribed to the <a href="https://aws.amazon.com/premiumsupport/business-support/">Business Support plan</a> or the <a href="https://aws.amazon.com/premiumsupport/enterprise-support/">Enterprise Support plan</a>. However, if you are not subscribed to one of these support plans, but had been previously and had granted the DRT access to your account, you can submit a <code>DisassociateDRTRole</code> request to remove this access.</p>
fn disassociate_drt_role(
&self,
) -> RusotoFuture<DisassociateDRTRoleResponse, DisassociateDRTRoleError> {
let mut request = SignedRequest::new("POST", "shield", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSShield_20160616.DisassociateDRTRole");
request.set_payload(Some(b"{}".to_vec()));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().map(|response| {
let mut body = response.body;
if body.is_empty() || body == b"null" {
body = b"{}".to_vec();
}
serde_json::from_str::<DisassociateDRTRoleResponse>(
String::from_utf8_lossy(body.as_ref()).as_ref(),
)
.unwrap()
}))
} else {
Box::new(
response.buffer().from_err().and_then(|response| {
Err(DisassociateDRTRoleError::from_response(response))
}),
)
}
})
}
/// <p>Returns the <code>SubscriptionState</code>, either <code>Active</code> or <code>Inactive</code>.</p>
fn get_subscription_state(
&self,
) -> RusotoFuture<GetSubscriptionStateResponse, GetSubscriptionStateError> {
let mut request = SignedRequest::new("POST", "shield", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSShield_20160616.GetSubscriptionState");
request.set_payload(Some(b"{}".to_vec()));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().map(|response| {
let mut body = response.body;
if body.is_empty() || body == b"null" {
body = b"{}".to_vec();
}
serde_json::from_str::<GetSubscriptionStateResponse>(
String::from_utf8_lossy(body.as_ref()).as_ref(),
)
.unwrap()
}))
} else {
Box::new(
response.buffer().from_err().and_then(|response| {
Err(GetSubscriptionStateError::from_response(response))
}),
)
}
})
}
/// <p>Returns all ongoing DDoS attacks or all DDoS attacks during a specified time period.</p>
fn list_attacks(
&self,
input: ListAttacksRequest,
) -> RusotoFuture<ListAttacksResponse, ListAttacksError> {
let mut request = SignedRequest::new("POST", "shield", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSShield_20160616.ListAttacks");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded.into_bytes()));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().map(|response| {
let mut body = response.body;
if body.is_empty() || body == b"null" {
body = b"{}".to_vec();
}
serde_json::from_str::<ListAttacksResponse>(
String::from_utf8_lossy(body.as_ref()).as_ref(),
)
.unwrap()
}))
} else {
Box::new(
response
.buffer()
.from_err()
.and_then(|response| Err(ListAttacksError::from_response(response))),
)
}
})
}
/// <p>Lists all <a>Protection</a> objects for the account.</p>
fn list_protections(
&self,
input: ListProtectionsRequest,
) -> RusotoFuture<ListProtectionsResponse, ListProtectionsError> {
let mut request = SignedRequest::new("POST", "shield", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSShield_20160616.ListProtections");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded.into_bytes()));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().map(|response| {
let mut body = response.body;
if body.is_empty() || body == b"null" {
body = b"{}".to_vec();
}
serde_json::from_str::<ListProtectionsResponse>(
String::from_utf8_lossy(body.as_ref()).as_ref(),
)
.unwrap()
}))
} else {
Box::new(
response
.buffer()
.from_err()
.and_then(|response| Err(ListProtectionsError::from_response(response))),
)
}
})
}
/// <p>Updates the details of the list of email addresses that the DRT can use to contact you during a suspected attack.</p>
fn update_emergency_contact_settings(
&self,
input: UpdateEmergencyContactSettingsRequest,
) -> RusotoFuture<UpdateEmergencyContactSettingsResponse, UpdateEmergencyContactSettingsError>
{
let mut request = SignedRequest::new("POST", "shield", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header(
"x-amz-target",
"AWSShield_20160616.UpdateEmergencyContactSettings",
);
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded.into_bytes()));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().map(|response| {
let mut body = response.body;
if body.is_empty() || body == b"null" {
body = b"{}".to_vec();
}
serde_json::from_str::<UpdateEmergencyContactSettingsResponse>(
String::from_utf8_lossy(body.as_ref()).as_ref(),
)
.unwrap()
}))
} else {
Box::new(response.buffer().from_err().and_then(|response| {
Err(UpdateEmergencyContactSettingsError::from_response(response))
}))
}
})
}
/// <p>Updates the details of an existing subscription. Only enter values for parameters you want to change. Empty parameters are not updated.</p>
fn update_subscription(
&self,
input: UpdateSubscriptionRequest,
) -> RusotoFuture<UpdateSubscriptionResponse, UpdateSubscriptionError> {
let mut request = SignedRequest::new("POST", "shield", &self.region, "/");
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.add_header("x-amz-target", "AWSShield_20160616.UpdateSubscription");
let encoded = serde_json::to_string(&input).unwrap();
request.set_payload(Some(encoded.into_bytes()));
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().map(|response| {
let mut body = response.body;
if body.is_empty() || body == b"null" {
body = b"{}".to_vec();
}
serde_json::from_str::<UpdateSubscriptionResponse>(
String::from_utf8_lossy(body.as_ref()).as_ref(),
)
.unwrap()
}))
} else {
Box::new(
response
.buffer()
.from_err()
.and_then(|response| Err(UpdateSubscriptionError::from_response(response))),
)
}
})
}
}
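// A minimal usage sketch, assuming the `ShieldClient` generated in this crate,
// `Region` from rusoto_core, and a blocking `.sync()` on the returned
// `RusotoFuture` (the `attack_summaries` field name follows the Shield API):
//
// let client = ShieldClient::new(Region::UsEast1);
// match client.list_attacks(ListAttacksRequest::default()).sync() {
//     Ok(output) => println!("attacks: {:?}", output.attack_summaries),
//     Err(e) => eprintln!("list_attacks failed: {}", e),
// }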
#[cfg(test)]
mod protocol_tests {}
| CreateSubscriptionResponse |
data-guards.ts | export function | (value: unknown): value is number {
return value !== null && typeof value === 'number' && !Number.isNaN(value) && Number.isFinite(value)
}
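// Usage examples (hypothetical values):
//   isValidNumber(42)       // true
//   isValidNumber(NaN)      // false
//   isValidNumber(Infinity) // false
//   isValidNumber('7')      // false (wrong type)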
| isValidNumber |
data.go | // Licensed to Elasticsearch B.V. under one or more contributor
// license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright
// ownership. Elasticsearch B.V. licenses this file to you under
// the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package osd_tree
import (
"encoding/json"
"strconv"
"strings"
"github.com/elastic/elastic-agent-libs/logp"
"github.com/elastic/elastic-agent-libs/mapstr"
)
// Node represents a node object
type Node struct {
ID int64 `json:"id"`
Name string `json:"name"`
Type string `json:"type"`
TypeID int64 `json:"type_id"`
Children []int64 `json:"children"`
CrushWeight float64 `json:"crush_weight"`
Depth int64 `json:"depth"`
Exist int64 `json:"exists"`
PrimaryAffinity float64 `json:"primary_affinity"`
Reweight float64 `json:"reweight"`
Status string `json:"status"`
DeviceClass string `json:"device_class"`
}
// Output contains the node list from the osd tree response
type Output struct {
Nodes []Node `json:"nodes"`
}
// OsdTreeRequest is an OSD tree response object
type OsdTreeRequest struct {
Status string `json:"status"`
Output Output `json:"output"`
}
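// A minimal example of the osd tree JSON this mapper expects (hypothetical values):
//
//	{
//	  "status": "OK",
//	  "output": {
//	    "nodes": [
//	      {"id": -1, "name": "default", "type": "root", "type_id": 10, "children": [0]},
//	      {"id": 0, "name": "osd.0", "type": "osd", "type_id": 0, "crush_weight": 0.0098,
//	       "depth": 1, "exists": 1, "reweight": 1.0, "status": "up", "device_class": "hdd"}
//	    ]
//	  }
//	}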
func eventsMapping(content []byte) ([]mapstr.M, error) | {
var d OsdTreeRequest
err := json.Unmarshal(content, &d)
if err != nil {
logp.Err("Error: %+v", err)
return nil, err
}
nodeList := d.Output.Nodes
    // generate father and children lookup maps from the bucket nodes
fatherMap := make(map[string]string)
childrenMap := make(map[string]string)
for _, node := range nodeList {
if node.ID >= 0 {
            // it's an OSD node, not a bucket; skip
continue
}
childrenList := []string{}
for _, child := range node.Children {
childIDStr := strconv.FormatInt(child, 10)
childrenList = append(childrenList, childIDStr)
fatherMap[childIDStr] = node.Name
}
        // record the bucket node's children list
childrenMap[node.Name] = strings.Join(childrenList, ",")
}
    // build one event per node
events := []mapstr.M{}
for _, node := range nodeList {
nodeInfo := mapstr.M{}
if node.ID < 0 {
            // bucket node
nodeInfo["children"] = strings.Split(childrenMap[node.Name], ",")
} else {
            // OSD node
nodeInfo["crush_weight"] = node.CrushWeight
nodeInfo["depth"] = node.Depth
nodeInfo["primary_affinity"] = node.PrimaryAffinity
nodeInfo["reweight"] = node.Reweight
nodeInfo["status"] = node.Status
nodeInfo["device_class"] = node.DeviceClass
if node.Exist > 0 {
nodeInfo["exists"] = true
} else {
nodeInfo["exists"] = false
}
}
nodeInfo["id"] = node.ID
nodeInfo["name"] = node.Name
nodeInfo["type"] = node.Type
nodeInfo["type_id"] = node.TypeID
idStr := strconv.FormatInt(node.ID, 10)
nodeInfo["father"] = fatherMap[idStr]
events = append(events, nodeInfo)
}
return events, nil
} |
|
timeoutstore_test.go | package redis_test
import (
"strconv"
"testing"
"time"
"github.com/alicebob/miniredis"
"github.com/soapboxsocial/soapbox/pkg/conf"
"github.com/soapboxsocial/soapbox/pkg/redis"
)
func TestTimeoutStore(t *testing.T) {
mr, err := miniredis.Run()
if err != nil {
t.Fatal(err)
}
port, err := strconv.Atoi(mr.Port())
if err != nil {
t.Fatal(err)
}
rdb := redis.NewRedis(conf.RedisConf{
Port: port,
Host: mr.Host(),
DisableTLS: true, | key := "foo"
if ts.IsOnTimeout(key) {
t.Fatal("should not be on timeout")
}
err = ts.SetTimeout(key, 5*time.Minute)
if err != nil {
t.Fatal(err)
}
if !ts.IsOnTimeout(key) {
t.Fatal("key is on timeout")
}
} | })
ts := redis.NewTimeoutStore(rdb)
|
block.ts | import { LoggifyClass } from '../decorators/Loggify';
import logger from '../logger';
import { StorageService } from '../services/storage';
import { SpentHeightIndicators } from '../types/Coin';
import { BitcoinBlockType, BitcoinHeaderObj } from '../types/namespaces/Bitcoin';
import { TransformOptions } from '../types/TransformOptions';
import { MongoBound } from './base';
import { BaseBlock, IBlock } from './baseBlock';
import { CoinStorage } from './coin';
import { EventStorage } from './events';
import { TransactionStorage } from './transaction';
export type iBglBlock = IBlock & {
version: number;
merkleRoot: string;
bits: number;
nonce: number;
};
@LoggifyClass
export class | extends BaseBlock<iBglBlock> {
constructor(storage?: StorageService) {
super(storage);
}
async addBlock(params: {
block: BitcoinBlockType;
parentChain?: string;
forkHeight?: number;
initialSyncComplete: boolean;
chain: string;
network: string;
}) {
const { block, chain, network } = params;
const header = block.header.toObject();
const reorg = await this.handleReorg({ header, chain, network });
if (reorg) {
return Promise.reject('reorg');
}
return this.processBlock(params);
}
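  // Usage sketch (hypothetical caller): assumes a parsed bitcore block and a
  // connected storage service; `BitcoinBlockStorage` is the singleton exported below.
  //
  //   await BitcoinBlockStorage.addBlock({
  //     block, chain: 'BGL', network: 'mainnet', initialSyncComplete: true
  //   });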
async processBlock(params: {
block: BitcoinBlockType;
parentChain?: string;
forkHeight?: number;
initialSyncComplete: boolean;
chain: string;
network: string;
}) {
const { chain, network, block, parentChain, forkHeight, initialSyncComplete } = params;
const blockOp = await this.getBlockOp(params);
const convertedBlock = blockOp.updateOne.update.$set;
const { height, timeNormalized, time } = convertedBlock;
const previousBlock = await this.collection.findOne({ hash: convertedBlock.previousBlockHash, chain, network });
await this.collection.bulkWrite([blockOp]);
if (previousBlock) {
await this.collection.updateOne(
{ chain, network, hash: previousBlock.hash },
{ $set: { nextBlockHash: convertedBlock.hash } }
);
logger.debug('Updating previous block.nextBlockHash ', convertedBlock.hash);
}
await TransactionStorage.batchImport({
txs: block.transactions,
blockHash: convertedBlock.hash,
blockTime: new Date(time),
blockTimeNormalized: new Date(timeNormalized),
height,
chain,
network,
parentChain,
forkHeight,
initialSyncComplete
});
if (initialSyncComplete) {
EventStorage.signalBlock(convertedBlock);
}
await this.collection.updateOne({ hash: convertedBlock.hash, chain, network }, { $set: { processed: true } });
}
async getBlockOp(params: { block: BitcoinBlockType; chain: string; network: string }) {
const { block, chain, network } = params;
const header = block.header.toObject();
const blockTime = header.time * 1000;
const previousBlock = await this.collection.findOne({ hash: header.prevHash, chain, network });
const blockTimeNormalized = (() => {
const prevTime = previousBlock ? new Date(previousBlock.timeNormalized) : null;
if (prevTime && blockTime <= prevTime.getTime()) {
return prevTime.getTime() + 1;
} else {
return blockTime;
}
})();
const height = (previousBlock && previousBlock.height + 1) || 1;
logger.debug('Setting blockheight: ' + height);
const convertedBlock: iBglBlock = {
chain,
network,
hash: block.hash,
height,
version: header.version,
nextBlockHash: '',
previousBlockHash: header.prevHash,
merkleRoot: header.merkleRoot,
time: new Date(blockTime),
timeNormalized: new Date(blockTimeNormalized),
bits: header.bits,
nonce: header.nonce,
transactionCount: block.transactions.length,
size: block.toBuffer().length,
reward: block.transactions[0].outputAmount,
processed: false
};
return {
updateOne: {
filter: {
hash: header.hash,
chain,
network
},
update: {
$set: convertedBlock
},
upsert: true
}
};
}
async handleReorg(params: { header?: BitcoinHeaderObj; chain: string; network: string }): Promise<boolean> {
const { header, chain, network } = params;
let localTip = await this.getLocalTip(params);
if (header && localTip && localTip.hash === header.prevHash) {
return false;
}
if (!localTip || localTip.height === 0) {
return false;
}
if (header) {
const prevBlock = await this.collection.findOne({ chain, network, hash: header.prevHash });
if (prevBlock) {
localTip = prevBlock;
} else {
logger.error("Previous block isn't in the DB need to roll back until we have a block in common");
}
logger.info(`Resetting tip to ${localTip.height - 1}`, { chain, network });
}
const reorgOps = [
this.collection.deleteMany({ chain, network, height: { $gte: localTip.height } }),
TransactionStorage.collection.deleteMany({ chain, network, blockHeight: { $gte: localTip.height } }),
CoinStorage.collection.deleteMany({ chain, network, mintHeight: { $gte: localTip.height } })
];
await Promise.all(reorgOps);
await CoinStorage.collection.updateMany(
{ chain, network, spentHeight: { $gte: localTip.height } },
{ $set: { spentTxid: null, spentHeight: SpentHeightIndicators.unspent } }
);
logger.debug('Removed data from above blockHeight: ', localTip.height);
return true;
}
_apiTransform(block: Partial<MongoBound<iBglBlock>>, options?: TransformOptions): any {
const transform = {
_id: block._id,
chain: block.chain,
network: block.network,
hash: block.hash,
height: block.height,
version: block.version,
size: block.size,
merkleRoot: block.merkleRoot,
time: block.time,
timeNormalized: block.timeNormalized,
nonce: block.nonce,
bits: block.bits,
/*
*difficulty: block.difficulty,
*/
/*
*chainWork: block.chainWork,
*/
previousBlockHash: block.previousBlockHash,
nextBlockHash: block.nextBlockHash,
reward: block.reward,
/*
*isMainChain: block.mainChain,
*/
transactionCount: block.transactionCount
/*
*minedBy: BlockModel.getPoolInfo(block.minedBy)
*/
};
if (options && options.object) {
return transform;
}
return JSON.stringify(transform);
}
}
export let BitcoinBlockStorage = new BitcoinBlock();
| BitcoinBlock |
mod.rs | //! TOML document to syntax tree parsing.
use crate::{
dom::{self, FromSyntax},
syntax::{SyntaxKind, SyntaxKind::*, SyntaxNode},
util::{allowed_chars, check_escape},
};
use logos::{Lexer, Logos};
use rowan::{GreenNode, GreenNodeBuilder, TextRange, TextSize};
use std::convert::TryInto;
#[macro_use]
mod macros;
/// A syntax error that can occur during parsing.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct Error {
/// The span of the error.
pub range: TextRange,
/// Human-friendly error message.
pub message: String,
}
impl core::fmt::Display for Error {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{} ({:?})", &self.message, &self.range)
}
}
impl std::error::Error for Error {}
/// Parse a TOML document into a [Rowan green tree](rowan::GreenNode).
///
/// The parsing will not stop at unexpected or invalid tokens.
/// Instead errors will be collected with their character offsets and lengths,
/// and the invalid token(s) will have the `ERROR` kind in the final tree.
///
/// The parser will also validate comment and string contents, looking for
/// invalid escape sequences and invalid characters.
/// These will also be reported as syntax errors.
///
/// This does not check for semantic errors such as duplicate keys.
pub fn parse(source: &str) -> Parse {
Parser::new(source).parse()
}
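// A minimal usage sketch (document text is hypothetical): parsing never fails
// outright; syntax errors are collected next to the green tree.
//
// let parse = parse("[package]\nname = \"demo\"\n");
// for err in &parse.errors {
//     eprintln!("syntax error: {}", err);
// }
// let dom = parse.into_dom();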
/// A hand-written parser that uses the Logos lexer
/// to tokenize the source, then constructs
/// a Rowan green tree from them.
pub(crate) struct Parser<'p> {
skip_whitespace: bool,
// Allow glob patterns as keys and using [] instead of dots.
key_pattern_syntax: bool,
current_token: Option<SyntaxKind>,
// These tokens are not consumed on errors.
//
// The syntax error is still reported,
    // but the surrounding context can still
// be parsed.
// FIXME(bit_flags):
// This is VERY wrong, as the members of the
// enums are not proper bit flags.
//
    // However, this incorrect behavior marks fewer tokens
    // as errors, making the parser more fault-tolerant.
// Instead of fixing this it would probably be better to
// remove the ERROR token altogether, or reserving it for
// special cases.
error_whitelist: u16,
lexer: Lexer<'p, SyntaxKind>,
builder: GreenNodeBuilder<'p>,
errors: Vec<Error>,
}
impl<'p> Parser<'p> {
    /// Parses only a key. Required for patch syntax
    /// and key matches.
    ///
    /// It also allows a subset of glob syntax in identifiers.
pub(crate) fn parse_key_only(mut self) -> Parse {
self.key_pattern_syntax = true;
let _ = with_node!(self.builder, KEY, self.parse_key());
Parse {
green_node: self.builder.finish(),
errors: self.errors,
}
}
}
/// This is just a convenience type during parsing.
/// It allows using "?", making the code cleaner.
type ParserResult<T> = Result<T, ()>;
// FIXME(recursion)
// Deeply nested structures cause stack overflow,
// this probably has to be rewritten into a state machine
// that contains minimal function calls.
impl<'p> Parser<'p> {
pub(crate) fn new(source: &'p str) -> Self {
Parser {
current_token: None,
skip_whitespace: true,
key_pattern_syntax: false,
error_whitelist: 0,
lexer: SyntaxKind::lexer(source),
builder: Default::default(),
errors: Default::default(),
}
}
fn parse(mut self) -> Parse {
let _ = with_node!(self.builder, ROOT, self.parse_root());
Parse {
green_node: self.builder.finish(),
errors: self.errors,
}
}
fn error(&mut self, message: &str) -> ParserResult<()> {
let span = self.lexer.span();
let err = Error {
range: TextRange::new(
TextSize::from(span.start as u32),
TextSize::from(span.end as u32),
),
message: message.into(),
};
let same_error = self
.errors
.last()
.map(|e| e.range == err.range)
.unwrap_or(false);
if !same_error {
self.add_error(&Error {
range: TextRange::new(
TextSize::from(span.start as u32),
TextSize::from(span.end as u32),
),
message: message.into(),
});
if let Some(t) = self.current_token {
if !self.whitelisted(t) {
self.token_as(ERROR).ok();
}
}
} else {
self.token_as(ERROR).ok();
}
Err(())
}
    // report error without consuming the current token
fn report_error(&mut self, message: &str) -> ParserResult<()> {
let span = self.lexer.span();
self.add_error(&Error {
range: TextRange::new(
TextSize::from(span.start as u32),
TextSize::from(span.end as u32),
),
message: message.into(),
});
Err(())
}
fn add_error(&mut self, e: &Error) {
if let Some(last_err) = self.errors.last_mut() {
if last_err == e {
return;
}
}
self.errors.push(e.clone());
}
#[inline]
fn whitelist_token(&mut self, token: SyntaxKind) {
self.error_whitelist |= token as u16;
}
#[inline]
fn blacklist_token(&mut self, token: SyntaxKind) {
self.error_whitelist &= !(token as u16);
}
#[inline]
fn whitelisted(&self, token: SyntaxKind) -> bool {
self.error_whitelist & token as u16 != 0
}
fn | (&mut self, kind: SyntaxKind, s: &str) {
self.builder.token(kind.into(), s)
}
fn must_token_or(&mut self, kind: SyntaxKind, message: &str) -> ParserResult<()> {
match self.get_token() {
Ok(t) => {
if kind == t {
self.token()
} else {
self.error(message)
}
}
Err(_) => {
self.add_error(&Error {
range: TextRange::new(
self.lexer.span().start.try_into().unwrap(),
self.lexer.span().end.try_into().unwrap(),
),
message: "unexpected EOF".into(),
});
Err(())
}
}
}
// This is the same as `token` but won't consume trailing whitespace.
fn add_token(&mut self) -> ParserResult<()> {
match self.get_token() {
Err(_) => Err(()),
Ok(token) => {
self.builder.token(token.into(), self.lexer.slice());
self.current_token = None;
Ok(())
}
}
}
fn token(&mut self) -> ParserResult<()> {
match self.get_token() {
Err(_) => Err(()),
Ok(token) => self.token_as(token),
}
}
fn token_as(&mut self, kind: SyntaxKind) -> ParserResult<()> {
match self.get_token() {
Err(_) => return Err(()),
Ok(_) => {
self.builder.token(kind.into(), self.lexer.slice());
}
}
self.step();
Ok(())
}
fn step(&mut self) {
self.current_token = None;
while let Some(token) = self.lexer.next() {
match token {
COMMENT => {
match allowed_chars::comment(self.lexer.slice()) {
Ok(_) => {}
Err(err_indices) => {
for e in err_indices {
self.add_error(&Error {
range: TextRange::new(
(self.lexer.span().start + e).try_into().unwrap(),
(self.lexer.span().start + e).try_into().unwrap(),
),
message: "invalid character in comment".into(),
});
}
}
};
self.insert_token(token, self.lexer.slice());
}
WHITESPACE => {
if self.skip_whitespace {
self.insert_token(token, self.lexer.slice());
} else {
self.current_token = Some(token);
break;
}
}
ERROR => {
self.insert_token(token, self.lexer.slice());
let span = self.lexer.span();
self.add_error(&Error {
range: TextRange::new(
span.start.try_into().unwrap(),
span.end.try_into().unwrap(),
),
message: "unexpected token".into(),
})
}
_ => {
self.current_token = Some(token);
break;
}
}
}
}
fn get_token(&mut self) -> ParserResult<SyntaxKind> {
if self.current_token.is_none() {
self.step();
}
self.current_token.ok_or(())
}
fn parse_root(&mut self) -> ParserResult<()> {
// Ensure we have newlines between entries
let mut not_newline = false;
// We want to make sure that an entry spans the
// entire line, so we start/close its node manually.
let mut entry_started = false;
while let Ok(token) = self.get_token() {
match token {
BRACKET_START => {
if entry_started {
self.builder.finish_node();
entry_started = false;
}
if not_newline {
let _ = self.error("expected new line");
continue;
}
not_newline = true;
if self.lexer.remainder().starts_with('[') {
let _ = whitelisted!(
self,
NEWLINE,
with_node!(
self.builder,
TABLE_ARRAY_HEADER,
self.parse_table_array_header()
)
);
} else {
let _ = whitelisted!(
self,
NEWLINE,
with_node!(self.builder, TABLE_HEADER, self.parse_table_header())
);
}
}
NEWLINE => {
not_newline = false;
if entry_started {
self.builder.finish_node();
entry_started = false;
}
let _ = self.token();
}
_ => {
if not_newline {
let _ = self.error("expected new line");
continue;
}
if entry_started {
self.builder.finish_node();
}
not_newline = true;
self.builder.start_node(ENTRY.into());
entry_started = true;
let _ = whitelisted!(self, NEWLINE, self.parse_entry());
}
}
}
if entry_started {
self.builder.finish_node();
}
Ok(())
}
fn parse_table_header(&mut self) -> ParserResult<()> {
self.must_token_or(BRACKET_START, r#"expected "[""#)?;
let _ = with_node!(self.builder, KEY, self.parse_key());
self.must_token_or(BRACKET_END, r#"expected "]""#)?;
Ok(())
}
fn parse_table_array_header(&mut self) -> ParserResult<()> {
self.must_token_or(BRACKET_START, r#"expected "[[""#)?;
self.must_token_or(BRACKET_START, r#"expected "[[""#)?;
let _ = with_node!(self.builder, KEY, self.parse_key());
self.skip_whitespace = false;
let _ = self.must_token_or(BRACKET_END, r#"expected "]]""#);
let _ = self.must_token_or(BRACKET_END, r#"expected "]]""#);
self.skip_whitespace = true;
Ok(())
}
fn parse_entry(&mut self) -> ParserResult<()> {
with_node!(self.builder, KEY, self.parse_key())?;
self.must_token_or(EQ, r#"expected "=""#)?;
with_node!(self.builder, VALUE, self.parse_value())?;
Ok(())
}
fn parse_key(&mut self) -> ParserResult<()> {
if self.parse_ident().is_err() {
return self.report_error("expected identifier");
}
let mut after_period = false;
loop {
let t = match self.get_token() {
Ok(token) => token,
Err(_) => {
if !after_period {
return Ok(());
}
return self.error("unexpected end of input");
}
};
match t {
PERIOD => {
if after_period {
return self.error(r#"unexpected ".""#);
} else {
self.token()?;
after_period = true;
}
}
BRACKET_START if self.key_pattern_syntax => {
self.step();
match self.parse_ident() {
Ok(_) => {}
Err(_) => return self.error("expected identifier"),
}
let token = self.get_token()?;
if !matches!(token, BRACKET_END) {
self.error(r#"expected "]""#)?;
}
self.step();
after_period = false;
}
_ => {
if after_period {
match self.parse_ident() {
Ok(_) => {}
Err(_) => return self.error("expected identifier"),
}
after_period = false;
} else if self.key_pattern_syntax {
return self.error("unexpected identifier");
} else {
break;
}
}
};
}
Ok(())
}
fn parse_ident(&mut self) -> ParserResult<()> {
let t = self.get_token()?;
match t {
IDENT => self.token(),
IDENT_WITH_GLOB => {
if self.key_pattern_syntax {
self.token_as(IDENT)
} else {
self.error("expected identifier")
}
}
INTEGER_HEX | INTEGER_BIN | INTEGER_OCT => self.token_as(IDENT),
INTEGER => {
if self.lexer.slice().starts_with('+') {
Err(())
} else {
self.token_as(IDENT)
}
}
STRING_LITERAL => {
match allowed_chars::string_literal(self.lexer.slice()) {
Ok(_) => {}
Err(err_indices) => {
for e in err_indices {
self.add_error(&Error {
range: TextRange::new(
(self.lexer.span().start + e).try_into().unwrap(),
(self.lexer.span().start + e).try_into().unwrap(),
),
message: "invalid control character in string literal".into(),
});
}
}
};
self.token_as(IDENT)
}
STRING => {
match allowed_chars::string(self.lexer.slice()) {
Ok(_) => {}
Err(err_indices) => {
for e in err_indices {
self.add_error(&Error {
range: TextRange::new(
(self.lexer.span().start + e).try_into().unwrap(),
(self.lexer.span().start + e).try_into().unwrap(),
),
message: "invalid character in string".into(),
});
}
}
};
match check_escape(self.lexer.slice()) {
Ok(_) => self.token_as(IDENT),
Err(err_indices) => {
for e in err_indices {
self.add_error(&Error {
range: TextRange::new(
(self.lexer.span().start + e).try_into().unwrap(),
(self.lexer.span().start + e).try_into().unwrap(),
),
message: "invalid escape sequence".into(),
});
}
// We proceed normally even if
// the string contains invalid escapes.
// It shouldn't affect the rest of the parsing.
self.token_as(IDENT)
}
}
}
FLOAT => {
if self.lexer.slice().starts_with('0') {
self.error("zero-padded numbers are not allowed")
} else if self.lexer.slice().starts_with('+') {
Err(())
} else {
for (i, s) in self.lexer.slice().split('.').enumerate() {
if i != 0 {
self.insert_token(PERIOD, ".");
}
self.insert_token(IDENT, s);
}
self.step();
Ok(())
}
}
BOOL => self.token_as(IDENT),
_ => self.error("expected identifier"),
}
}
fn parse_value(&mut self) -> ParserResult<()> {
let t = match self.get_token() {
Ok(t) => t,
Err(_) => return self.error("expected value"),
};
match t {
BOOL | DATE_TIME_OFFSET | DATE_TIME_LOCAL | DATE | TIME => self.token(),
INTEGER => {
// This is probably a logos bug or a priority issue,
// for some reason "1979-05-27" gets lexed as INTEGER.
if !self.lexer.slice().starts_with('-') && self.lexer.slice().contains('-') {
return self.token_as(DATE);
}
// This could've been done more elegantly probably.
if (self.lexer.slice().starts_with('0') && self.lexer.slice() != "0")
|| (self.lexer.slice().starts_with("+0") && self.lexer.slice() != "+0")
|| (self.lexer.slice().starts_with("-0") && self.lexer.slice() != "-0")
{
self.error("zero-padded integers are not allowed")
} else if !check_underscores(self.lexer.slice(), 10) {
self.error("invalid underscores")
} else {
self.token()
}
}
INTEGER_BIN => {
if !check_underscores(self.lexer.slice(), 2) {
self.error("invalid underscores")
} else {
self.token()
}
}
INTEGER_HEX => {
if !check_underscores(self.lexer.slice(), 16) {
self.error("invalid underscores")
} else {
self.token()
}
}
INTEGER_OCT => {
if !check_underscores(self.lexer.slice(), 8) {
self.error("invalid underscores")
} else {
self.token()
}
}
FLOAT => {
let int_slice = if self.lexer.slice().contains('.') {
self.lexer.slice().split('.').next().unwrap()
} else {
self.lexer.slice().split('e').next().unwrap()
};
if (int_slice.starts_with('0') && int_slice != "0")
|| (int_slice.starts_with("+0") && int_slice != "+0")
|| (int_slice.starts_with("-0") && int_slice != "-0")
{
self.error("zero-padded numbers are not allowed")
} else if !check_underscores(self.lexer.slice(), 10) {
self.error("invalid underscores")
} else {
self.token()
}
}
STRING_LITERAL => {
match allowed_chars::string_literal(self.lexer.slice()) {
Ok(_) => {}
Err(err_indices) => {
for e in err_indices {
self.add_error(&Error {
range: TextRange::new(
(self.lexer.span().start + e).try_into().unwrap(),
(self.lexer.span().start + e).try_into().unwrap(),
),
message: "invalid control character in string literal".into(),
});
}
}
};
self.token()
}
MULTI_LINE_STRING_LITERAL => {
match allowed_chars::multi_line_string_literal(self.lexer.slice()) {
Ok(_) => {}
Err(err_indices) => {
for e in err_indices {
self.add_error(&Error {
range: TextRange::new(
(self.lexer.span().start + e).try_into().unwrap(),
(self.lexer.span().start + e).try_into().unwrap(),
),
message: "invalid character in string".into(),
});
}
}
};
self.token()
}
STRING => {
match allowed_chars::string(self.lexer.slice()) {
Ok(_) => {}
Err(err_indices) => {
for e in err_indices {
self.add_error(&Error {
range: TextRange::new(
(self.lexer.span().start + e).try_into().unwrap(),
(self.lexer.span().start + e).try_into().unwrap(),
),
message: "invalid character in string".into(),
});
}
}
};
match check_escape(self.lexer.slice()) {
Ok(_) => self.token(),
Err(err_indices) => {
for e in err_indices {
self.add_error(&Error {
range: TextRange::new(
(self.lexer.span().start + e).try_into().unwrap(),
(self.lexer.span().start + e).try_into().unwrap(),
),
message: "invalid escape sequence".into(),
});
}
// We proceed normally even if
// the string contains invalid escapes.
// It shouldn't affect the rest of the parsing.
self.token()
}
}
}
MULTI_LINE_STRING => {
match allowed_chars::multi_line_string(self.lexer.slice()) {
Ok(_) => {}
Err(err_indices) => {
for e in err_indices {
self.add_error(&Error {
range: TextRange::new(
(self.lexer.span().start + e).try_into().unwrap(),
(self.lexer.span().start + e).try_into().unwrap(),
),
message: "invalid character in string".into(),
});
}
}
};
match check_escape(self.lexer.slice()) {
Ok(_) => self.token(),
Err(err_indices) => {
for e in err_indices {
self.add_error(&Error {
range: TextRange::new(
(self.lexer.span().start + e).try_into().unwrap(),
(self.lexer.span().start + e).try_into().unwrap(),
),
message: "invalid escape sequence".into(),
});
}
// We proceed normally even if
// the string contains invalid escapes.
// It shouldn't affect the rest of the parsing.
self.token()
}
}
}
BRACKET_START => {
with_node!(self.builder, ARRAY, self.parse_array())
}
BRACE_START => {
with_node!(self.builder, INLINE_TABLE, self.parse_inline_table())
}
IDENT | BRACE_END => {
// FIXME(bit_flags): This branch is just a workaround.
self.report_error("expected value").ok();
Ok(())
}
_ => self.error("expected value"),
}
}
fn parse_inline_table(&mut self) -> ParserResult<()> {
self.must_token_or(BRACE_START, r#"expected "{""#)?;
let mut first = true;
let mut comma_last = false;
let mut was_newline = false;
loop {
let t = match self.get_token() {
Ok(t) => t,
Err(_) => return self.report_error(r#"expected "}""#),
};
match t {
BRACE_END => {
if comma_last {
// it is still reported as a syntax error,
// but we can still analyze it as if it was a valid
// table.
let _ = self.report_error("expected value, trailing comma is not allowed");
}
break self.add_token()?;
}
NEWLINE => {
                    // Avoid an infinite loop in case
                    // newlines are whitelisted.
if was_newline {
break;
}
let _ = self.error("newline is not allowed in an inline table");
was_newline = true;
}
COMMA => {
if comma_last {
let _ = self.report_error(r#"unexpected ",""#);
}
if first {
let _ = self.error(r#"unexpected ",""#);
} else {
self.token()?;
}
comma_last = true;
was_newline = false;
}
_ => {
was_newline = false;
if !comma_last && !first {
let _ = self.error(r#"expected ",""#);
}
let _ = whitelisted!(
self,
COMMA,
with_node!(self.builder, ENTRY, self.parse_entry())
);
comma_last = false;
}
}
first = false;
}
Ok(())
}
fn parse_array(&mut self) -> ParserResult<()> {
self.must_token_or(BRACKET_START, r#"expected "[""#)?;
let mut first = true;
let mut comma_last = false;
loop {
let t = match self.get_token() {
Ok(t) => t,
Err(_) => {
let _ = self.report_error("unexpected EOF");
return Err(());
}
};
match t {
BRACKET_END => break self.add_token()?,
NEWLINE => {
self.token()?;
continue; // as if it wasn't there, so it doesn't count as a first token
}
COMMA => {
if first || comma_last {
let _ = self.error(r#"unexpected ",""#);
}
self.token()?;
comma_last = true;
}
_ => {
if !comma_last && !first {
let _ = self.error(r#"expected ",""#);
}
let _ = whitelisted!(
self,
COMMA,
with_node!(self.builder, VALUE, self.parse_value())
);
comma_last = false;
}
}
first = false;
}
Ok(())
}
}
fn check_underscores(s: &str, radix: u32) -> bool {
if s.starts_with('_') || s.ends_with('_') {
return false;
}
let mut last_char = 0 as char;
for c in s.chars() {
if c == '_' && !last_char.is_digit(radix) {
return false;
}
if !c.is_digit(radix) && last_char == '_' {
return false;
}
last_char = c;
}
true
}
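// For example, with radix 10: "1_000" passes, while "_1000", "1000_",
// and "1__000" are rejected, since every underscore must sit between digits.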
/// The final result of a parse.
/// It contains the green tree and
/// the errors that occurred during parsing.
#[derive(Debug, Clone)]
pub struct Parse {
pub green_node: GreenNode,
pub errors: Vec<Error>,
}
impl Parse {
/// Turn the parse into a syntax node.
pub fn into_syntax(self) -> SyntaxNode {
SyntaxNode::new_root(self.green_node)
}
/// Turn the parse into a DOM tree.
///
/// Any semantic errors that occur will be collected
/// in the returned DOM node.
pub fn into_dom(self) -> dom::node::Node {
dom::Node::from_syntax(self.into_syntax().into())
}
}
| insert_token |
lib.rs | // Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![crate_name = "rustc_errors"]
#![crate_type = "dylib"]
#![crate_type = "rlib"]
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
html_root_url = "https://doc.rust-lang.org/nightly/")]
#![deny(warnings)]
#![feature(custom_attribute)]
#![allow(unused_attributes)]
#![feature(range_contains)]
#![feature(libc)]
#![feature(conservative_impl_trait)]
#![cfg_attr(stage0, unstable(feature = "rustc_private", issue = "27812"))]
#![cfg_attr(stage0, feature(rustc_private))]
#![cfg_attr(stage0, feature(staged_api))]
extern crate term;
extern crate libc;
extern crate serialize as rustc_serialize;
extern crate syntax_pos;
pub use emitter::ColorConfig;
use self::Level::*;
use emitter::{Emitter, EmitterWriter};
use std::cell::{RefCell, Cell};
use std::{error, fmt};
use std::rc::Rc;
pub mod diagnostic;
pub mod diagnostic_builder;
pub mod emitter;
pub mod snippet;
pub mod registry;
pub mod styled_buffer;
mod lock;
use syntax_pos::{BytePos, Loc, FileLinesResult, FileName, MultiSpan, Span, NO_EXPANSION};
#[derive(Clone, Debug, PartialEq, RustcEncodable, RustcDecodable)]
pub enum RenderSpan {
    /// A FullSpan renders with an initial line for the
    /// message, prefixed by file:linenum, followed by a summary of
    /// the source code covered by the span.
FullSpan(MultiSpan),
    /// A suggestion renders with an initial line for the
    /// message, prefixed by file:linenum, followed by a summary
    /// of hypothetical source code, where each `String` is spliced
    /// into the lines in place of the code covered by each span.
Suggestion(CodeSuggestion),
}
#[derive(Clone, Debug, PartialEq, RustcEncodable, RustcDecodable)]
pub struct CodeSuggestion {
/// Each substitute can have multiple variants due to multiple
/// applicable suggestions
///
/// `foo.bar` might be replaced with `a.b` or `x.y` by replacing
/// `foo` and `bar` on their own:
///
/// ```
/// vec![
/// (0..3, vec!["a", "x"]),
/// (4..7, vec!["b", "y"]),
/// ]
/// ```
///
/// or by replacing the entire span:
///
/// ```
/// vec![(0..7, vec!["a.b", "x.y"])]
/// ```
pub substitution_parts: Vec<Substitution>,
pub msg: String,
}
#[derive(Clone, Debug, PartialEq, RustcEncodable, RustcDecodable)]
/// See the docs on `CodeSuggestion::substitution_parts`
pub struct Substitution {
pub span: Span,
pub substitutions: Vec<String>,
}
pub trait CodeMapper {
fn lookup_char_pos(&self, pos: BytePos) -> Loc;
fn span_to_lines(&self, sp: Span) -> FileLinesResult;
fn span_to_string(&self, sp: Span) -> String;
fn span_to_filename(&self, sp: Span) -> FileName;
fn merge_spans(&self, sp_lhs: Span, sp_rhs: Span) -> Option<Span>;
}
impl CodeSuggestion {
/// Returns the number of substitutions
fn substitutions(&self) -> usize {
self.substitution_parts[0].substitutions.len()
}
    /// Returns the spans of the substitutions
pub fn substitution_spans<'a>(&'a self) -> impl Iterator<Item = Span> + 'a {
self.substitution_parts.iter().map(|sub| sub.span)
}
/// Returns the assembled code suggestions.
pub fn splice_lines(&self, cm: &CodeMapper) -> Vec<String> {
use syntax_pos::{CharPos, Loc, Pos};
fn push_trailing(buf: &mut String,
line_opt: Option<&str>,
lo: &Loc,
hi_opt: Option<&Loc>) {
let (lo, hi_opt) = (lo.col.to_usize(), hi_opt.map(|hi| hi.col.to_usize()));
if let Some(line) = line_opt {
if let Some(lo) = line.char_indices().map(|(i, _)| i).nth(lo) |
if let None = hi_opt {
buf.push('\n');
}
}
}
if self.substitution_parts.is_empty() {
return vec![String::new()];
}
let mut primary_spans: Vec<_> = self.substitution_parts
.iter()
.map(|sub| (sub.span, &sub.substitutions))
.collect();
// Assumption: all spans are in the same file, and all spans
// are disjoint. Sort in ascending order.
primary_spans.sort_by_key(|sp| sp.0.lo);
// Find the bounding span.
let lo = primary_spans.iter().map(|sp| sp.0.lo).min().unwrap();
        let hi = primary_spans.iter().map(|sp| sp.0.hi).max().unwrap();
let bounding_span = Span {
lo: lo,
hi: hi,
ctxt: NO_EXPANSION,
};
let lines = cm.span_to_lines(bounding_span).unwrap();
assert!(!lines.lines.is_empty());
// To build up the result, we do this for each span:
// - push the line segment trailing the previous span
// (at the beginning a "phantom" span pointing at the start of the line)
// - push lines between the previous and current span (if any)
// - if the previous and current span are not on the same line
// push the line segment leading up to the current span
// - splice in the span substitution
//
// Finally push the trailing line segment of the last span
let fm = &lines.file;
let mut prev_hi = cm.lookup_char_pos(bounding_span.lo);
prev_hi.col = CharPos::from_usize(0);
let mut prev_line = fm.get_line(lines.lines[0].line_index);
let mut bufs = vec![String::new(); self.substitutions()];
for (sp, substitutes) in primary_spans {
let cur_lo = cm.lookup_char_pos(sp.lo);
for (buf, substitute) in bufs.iter_mut().zip(substitutes) {
if prev_hi.line == cur_lo.line {
push_trailing(buf, prev_line, &prev_hi, Some(&cur_lo));
} else {
push_trailing(buf, prev_line, &prev_hi, None);
// push lines between the previous and current span (if any)
for idx in prev_hi.line..(cur_lo.line - 1) {
if let Some(line) = fm.get_line(idx) {
buf.push_str(line);
buf.push('\n');
}
}
if let Some(cur_line) = fm.get_line(cur_lo.line - 1) {
buf.push_str(&cur_line[..cur_lo.col.to_usize()]);
}
}
buf.push_str(substitute);
}
prev_hi = cm.lookup_char_pos(sp.hi);
prev_line = fm.get_line(prev_hi.line - 1);
}
for buf in &mut bufs {
// if the replacement already ends with a newline, don't print the next line
if !buf.ends_with('\n') {
push_trailing(buf, prev_line, &prev_hi, None);
}
// remove trailing newline
buf.pop();
}
bufs
}
}
/// Used as a return value to signify a fatal error occurred. (It is also
/// used as the argument to panic at the moment, but that will eventually
/// not be true.)
#[derive(Copy, Clone, Debug)]
#[must_use]
pub struct FatalError;
impl fmt::Display for FatalError {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
write!(f, "parser fatal error")
}
}
impl error::Error for FatalError {
fn description(&self) -> &str {
"The parser has encountered a fatal error"
}
}
/// Signifies that the compiler died with an explicit call to `.bug`
/// or `.span_bug` rather than a failed assertion, etc.
#[derive(Copy, Clone, Debug)]
pub struct ExplicitBug;
impl fmt::Display for ExplicitBug {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
write!(f, "parser internal bug")
}
}
impl error::Error for ExplicitBug {
fn description(&self) -> &str {
"The parser has encountered an internal bug"
}
}
pub use diagnostic::{Diagnostic, SubDiagnostic, DiagnosticStyledString, StringPart};
pub use diagnostic_builder::DiagnosticBuilder;
/// A handler deals with errors; certain errors
/// (fatal, bug, unimpl) may cause immediate exit,
/// others log errors for later reporting.
pub struct Handler {
err_count: Cell<usize>,
emitter: RefCell<Box<Emitter>>,
pub can_emit_warnings: bool,
treat_err_as_bug: bool,
continue_after_error: Cell<bool>,
delayed_span_bug: RefCell<Option<(MultiSpan, String)>>,
}
impl Handler {
pub fn with_tty_emitter(color_config: ColorConfig,
can_emit_warnings: bool,
treat_err_as_bug: bool,
cm: Option<Rc<CodeMapper>>)
-> Handler {
let emitter = Box::new(EmitterWriter::stderr(color_config, cm));
Handler::with_emitter(can_emit_warnings, treat_err_as_bug, emitter)
}
pub fn with_emitter(can_emit_warnings: bool,
treat_err_as_bug: bool,
e: Box<Emitter>)
-> Handler {
Handler {
err_count: Cell::new(0),
emitter: RefCell::new(e),
can_emit_warnings: can_emit_warnings,
treat_err_as_bug: treat_err_as_bug,
continue_after_error: Cell::new(true),
delayed_span_bug: RefCell::new(None),
}
}
pub fn set_continue_after_error(&self, continue_after_error: bool) {
self.continue_after_error.set(continue_after_error);
}
pub fn struct_dummy<'a>(&'a self) -> DiagnosticBuilder<'a> {
DiagnosticBuilder::new(self, Level::Cancelled, "")
}
pub fn struct_span_warn<'a, S: Into<MultiSpan>>(&'a self,
sp: S,
msg: &str)
-> DiagnosticBuilder<'a> {
let mut result = DiagnosticBuilder::new(self, Level::Warning, msg);
result.set_span(sp);
if !self.can_emit_warnings {
result.cancel();
}
result
}
pub fn struct_span_warn_with_code<'a, S: Into<MultiSpan>>(&'a self,
sp: S,
msg: &str,
code: &str)
-> DiagnosticBuilder<'a> {
let mut result = DiagnosticBuilder::new(self, Level::Warning, msg);
result.set_span(sp);
result.code(code.to_owned());
if !self.can_emit_warnings {
result.cancel();
}
result
}
pub fn struct_warn<'a>(&'a self, msg: &str) -> DiagnosticBuilder<'a> {
let mut result = DiagnosticBuilder::new(self, Level::Warning, msg);
if !self.can_emit_warnings {
result.cancel();
}
result
}
pub fn struct_span_err<'a, S: Into<MultiSpan>>(&'a self,
sp: S,
msg: &str)
-> DiagnosticBuilder<'a> {
let mut result = DiagnosticBuilder::new(self, Level::Error, msg);
result.set_span(sp);
result
}
pub fn struct_span_err_with_code<'a, S: Into<MultiSpan>>(&'a self,
sp: S,
msg: &str,
code: &str)
-> DiagnosticBuilder<'a> {
let mut result = DiagnosticBuilder::new(self, Level::Error, msg);
result.set_span(sp);
result.code(code.to_owned());
result
}
pub fn struct_err<'a>(&'a self, msg: &str) -> DiagnosticBuilder<'a> {
DiagnosticBuilder::new(self, Level::Error, msg)
}
pub fn struct_span_fatal<'a, S: Into<MultiSpan>>(&'a self,
sp: S,
msg: &str)
-> DiagnosticBuilder<'a> {
let mut result = DiagnosticBuilder::new(self, Level::Fatal, msg);
result.set_span(sp);
result
}
pub fn struct_span_fatal_with_code<'a, S: Into<MultiSpan>>(&'a self,
sp: S,
msg: &str,
code: &str)
-> DiagnosticBuilder<'a> {
let mut result = DiagnosticBuilder::new(self, Level::Fatal, msg);
result.set_span(sp);
result.code(code.to_owned());
result
}
pub fn struct_fatal<'a>(&'a self, msg: &str) -> DiagnosticBuilder<'a> {
DiagnosticBuilder::new(self, Level::Fatal, msg)
}
pub fn cancel(&self, err: &mut DiagnosticBuilder) {
err.cancel();
}
fn panic_if_treat_err_as_bug(&self) {
if self.treat_err_as_bug {
panic!("encountered error with `-Z treat_err_as_bug");
}
}
pub fn span_fatal<S: Into<MultiSpan>>(&self, sp: S, msg: &str) -> FatalError {
self.emit(&sp.into(), msg, Fatal);
self.panic_if_treat_err_as_bug();
FatalError
}
pub fn span_fatal_with_code<S: Into<MultiSpan>>(&self,
sp: S,
msg: &str,
code: &str)
-> FatalError {
self.emit_with_code(&sp.into(), msg, code, Fatal);
self.panic_if_treat_err_as_bug();
FatalError
}
pub fn span_err<S: Into<MultiSpan>>(&self, sp: S, msg: &str) {
self.emit(&sp.into(), msg, Error);
self.panic_if_treat_err_as_bug();
}
pub fn mut_span_err<'a, S: Into<MultiSpan>>(&'a self,
sp: S,
msg: &str)
-> DiagnosticBuilder<'a> {
let mut result = DiagnosticBuilder::new(self, Level::Error, msg);
result.set_span(sp);
result
}
pub fn span_err_with_code<S: Into<MultiSpan>>(&self, sp: S, msg: &str, code: &str) {
self.emit_with_code(&sp.into(), msg, code, Error);
self.panic_if_treat_err_as_bug();
}
pub fn span_warn<S: Into<MultiSpan>>(&self, sp: S, msg: &str) {
self.emit(&sp.into(), msg, Warning);
}
pub fn span_warn_with_code<S: Into<MultiSpan>>(&self, sp: S, msg: &str, code: &str) {
self.emit_with_code(&sp.into(), msg, code, Warning);
}
pub fn span_bug<S: Into<MultiSpan>>(&self, sp: S, msg: &str) -> ! {
self.emit(&sp.into(), msg, Bug);
panic!(ExplicitBug);
}
pub fn delay_span_bug<S: Into<MultiSpan>>(&self, sp: S, msg: &str) {
if self.treat_err_as_bug {
self.span_bug(sp, msg);
}
let mut delayed = self.delayed_span_bug.borrow_mut();
*delayed = Some((sp.into(), msg.to_string()));
}
pub fn span_bug_no_panic<S: Into<MultiSpan>>(&self, sp: S, msg: &str) {
self.emit(&sp.into(), msg, Bug);
}
pub fn span_note_without_error<S: Into<MultiSpan>>(&self, sp: S, msg: &str) {
self.emit(&sp.into(), msg, Note);
}
pub fn span_note_diag<'a>(&'a self,
sp: Span,
msg: &str)
-> DiagnosticBuilder<'a> {
let mut db = DiagnosticBuilder::new(self, Note, msg);
db.set_span(sp);
db
}
pub fn span_unimpl<S: Into<MultiSpan>>(&self, sp: S, msg: &str) -> ! {
self.span_bug(sp, &format!("unimplemented {}", msg));
}
pub fn fatal(&self, msg: &str) -> FatalError {
if self.treat_err_as_bug {
self.bug(msg);
}
let mut db = DiagnosticBuilder::new(self, Fatal, msg);
db.emit();
FatalError
}
pub fn err(&self, msg: &str) {
if self.treat_err_as_bug {
self.bug(msg);
}
let mut db = DiagnosticBuilder::new(self, Error, msg);
db.emit();
}
pub fn warn(&self, msg: &str) {
let mut db = DiagnosticBuilder::new(self, Warning, msg);
db.emit();
}
pub fn note_without_error(&self, msg: &str) {
let mut db = DiagnosticBuilder::new(self, Note, msg);
db.emit();
}
pub fn bug(&self, msg: &str) -> ! {
let mut db = DiagnosticBuilder::new(self, Bug, msg);
db.emit();
panic!(ExplicitBug);
}
pub fn unimpl(&self, msg: &str) -> ! {
self.bug(&format!("unimplemented {}", msg));
}
pub fn bump_err_count(&self) {
self.err_count.set(self.err_count.get() + 1);
}
pub fn err_count(&self) -> usize {
self.err_count.get()
}
pub fn has_errors(&self) -> bool {
self.err_count.get() > 0
}
pub fn abort_if_errors(&self) {
let s;
match self.err_count.get() {
0 => {
let delayed_bug = self.delayed_span_bug.borrow();
match *delayed_bug {
Some((ref span, ref errmsg)) => {
self.span_bug(span.clone(), errmsg);
}
_ => {}
}
return;
}
_ => s = "aborting due to previous error(s)".to_string(),
}
panic!(self.fatal(&s));
}
pub fn emit(&self, msp: &MultiSpan, msg: &str, lvl: Level) {
if lvl == Warning && !self.can_emit_warnings {
return;
}
let mut db = DiagnosticBuilder::new(self, lvl, msg);
db.set_span(msp.clone());
db.emit();
if !self.continue_after_error.get() {
self.abort_if_errors();
}
}
pub fn emit_with_code(&self, msp: &MultiSpan, msg: &str, code: &str, lvl: Level) {
if lvl == Warning && !self.can_emit_warnings {
return;
}
let mut db = DiagnosticBuilder::new_with_code(self, lvl, Some(code.to_owned()), msg);
db.set_span(msp.clone());
db.emit();
if !self.continue_after_error.get() {
self.abort_if_errors();
}
}
}
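// A minimal usage sketch relying only on the API defined above
// (`ColorConfig::Auto` assumed from the emitter module; no codemap attached):
//
// let handler = Handler::with_tty_emitter(ColorConfig::Auto, true, false, None);
// handler.warn("this is only a warning");
// let mut err = handler.struct_err("something went wrong");
// err.emit();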
#[derive(Copy, PartialEq, Clone, Debug, RustcEncodable, RustcDecodable)]
pub enum Level {
Bug,
Fatal,
    // An error which, while not immediately fatal, should stop the compiler
    // from progressing beyond the current phase.
PhaseFatal,
Error,
Warning,
Note,
Help,
Cancelled,
}
impl fmt::Display for Level {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.to_str().fmt(f)
}
}
impl Level {
pub fn color(self) -> term::color::Color {
match self {
Bug | Fatal | PhaseFatal | Error => term::color::BRIGHT_RED,
Warning => {
if cfg!(windows) {
term::color::BRIGHT_YELLOW
} else {
term::color::YELLOW
}
}
Note => term::color::BRIGHT_GREEN,
Help => term::color::BRIGHT_CYAN,
Cancelled => unreachable!(),
}
}
pub fn to_str(self) -> &'static str {
match self {
Bug => "error: internal compiler error",
Fatal | PhaseFatal | Error => "error",
Warning => "warning",
Note => "note",
Help => "help",
Cancelled => panic!("Shouldn't call on cancelled error"),
}
}
}
pub fn expect<T, M>(diag: &Handler, opt: Option<T>, msg: M) -> T
where M: FnOnce() -> String
{
match opt {
Some(t) => t,
None => diag.bug(&msg()),
}
}
| {
let hi_opt = hi_opt.and_then(|hi| line.char_indices().map(|(i, _)| i).nth(hi));
buf.push_str(match hi_opt {
Some(hi) => &line[lo..hi],
None => &line[lo..],
});
} |
six.rs | use crate::{
Result,
botw::{Control, SubControl},
};
use byteordered::Endian;
use failure::ResultExt;
use msbt::Header;
use serde_derive::{Deserialize, Serialize};
use std::io::{Cursor, Read, Write};
#[derive(Debug, Deserialize, Serialize)]
pub struct Control1_6 {
pub(crate) field_1: u16,
pub(crate) field_2: u16,
pub(crate) field_3: u16,
pub(crate) field_4: u16,
pub(crate) field_5: u16,
pub(crate) field_6: [u8; 2],
}
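// Wire layout: five u16 fields followed by two raw bytes (12 bytes total).
// parse() below reads them in that order and reinterprets them as a Choice
// control: field_1 is an unknown value, field_2..field_5 are choice label
// ids, and the two bytes of field_6 are the selected and cancel indices.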
impl SubControl for Control1_6 {
fn | (&self) -> u16 {
6
}
fn parse(header: &Header, mut reader: &mut Cursor<&[u8]>) -> Result<Control> {
let mut field_6 = [0; 2];
let field_1 = header.endianness().read_u16(&mut reader).with_context(|_| "could not read field_1")?;
let field_2 = header.endianness().read_u16(&mut reader).with_context(|_| "could not read field_2")?;
let field_3 = header.endianness().read_u16(&mut reader).with_context(|_| "could not read field_3")?;
let field_4 = header.endianness().read_u16(&mut reader).with_context(|_| "could not read field_4")?;
let field_5 = header.endianness().read_u16(&mut reader).with_context(|_| "could not read field_5")?;
reader.read_exact(&mut field_6[..]).with_context(|_| "could not read field_6")?;
Ok(Control::Choice {
unknown: field_1,
choice_labels: vec![field_2, field_3, field_4, field_5],
selected_index: field_6[0],
cancel_index: field_6[1],
})
}
fn write(&self, header: &Header, mut writer: &mut dyn Write) -> Result<()> {
header.endianness().write_u16(&mut writer, self.field_1).with_context(|_| "could not write field_1")?;
header.endianness().write_u16(&mut writer, self.field_2).with_context(|_| "could not write field_2")?;
header.endianness().write_u16(&mut writer, self.field_3).with_context(|_| "could not write field_3")?;
header.endianness().write_u16(&mut writer, self.field_4).with_context(|_| "could not write field_4")?;
header.endianness().write_u16(&mut writer, self.field_5).with_context(|_| "could not write field_5")?;
writer.write_all(&self.field_6).with_context(|_| "could not write field_6")?;
Ok(())
}
}
| marker |
stream.rs | use super::Disposition;
use codec::{self, packet};
use ffi::*;
use format::context::common::Context;
use libc::c_int;
use {DictionaryRef, Discard, Rational};
pub struct Stream<'a> {
context: &'a Context,
index: usize,
}
impl<'a> Stream<'a> {
pub unsafe fn wrap(context: &Context, index: usize) -> Stream {
Stream { context, index }
}
pub unsafe fn as_ptr(&self) -> *const AVStream {
*(*self.context.as_ptr()).streams.add(self.index)
}
}
impl<'a> Stream<'a> {
pub fn id(&self) -> i32 {
unsafe { (*self.as_ptr()).id }
}
pub fn codec(&self) -> codec::Context {
unsafe { codec::Context::wrap((*self.as_ptr()).codec, Some(self.context.destructor())) }
}
pub fn parameters(&self) -> codec::Parameters {
unsafe {
codec::Parameters::wrap((*self.as_ptr()).codecpar, Some(self.context.destructor()))
}
}
pub fn index(&self) -> usize {
unsafe { (*self.as_ptr()).index as usize }
}
pub fn time_base(&self) -> Rational {
unsafe { Rational::from((*self.as_ptr()).time_base) }
}
pub fn start_time(&self) -> i64 {
unsafe { (*self.as_ptr()).start_time }
}
pub fn duration(&self) -> i64 {
unsafe { (*self.as_ptr()).duration }
}
pub fn frames(&self) -> i64 {
unsafe { (*self.as_ptr()).nb_frames }
}
pub fn disposition(&self) -> Disposition {
unsafe { Disposition::from_bits_truncate((*self.as_ptr()).disposition) }
}
pub fn discard(&self) -> Discard {
unsafe { Discard::from((*self.as_ptr()).discard) }
}
pub fn side_data(&self) -> SideDataIter {
SideDataIter::new(self)
}
pub fn rate(&self) -> Rational {
unsafe { Rational::from(av_stream_get_r_frame_rate(self.as_ptr())) }
}
pub fn avg_frame_rate(&self) -> Rational |
pub fn metadata(&self) -> DictionaryRef {
unsafe { DictionaryRef::wrap((*self.as_ptr()).metadata) }
}
}
impl<'a> PartialEq for Stream<'a> {
fn eq(&self, other: &Self) -> bool {
unsafe { self.as_ptr() == other.as_ptr() }
}
}
impl<'a> Eq for Stream<'a> {}
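// A usage sketch (assumes a `Stream` borrowed from an opened format context;
// `data()` returning the raw bytes is an assumption about `packet::SideData`):
//
// for entry in stream.side_data() {
//     println!("side data entry of {} bytes", entry.data().len());
// }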
pub struct SideDataIter<'a> {
stream: &'a Stream<'a>,
current: c_int,
}
impl<'a> SideDataIter<'a> {
pub fn new<'sd, 's: 'sd>(stream: &'s Stream) -> SideDataIter<'sd> {
SideDataIter { stream, current: 0 }
}
}
impl<'a> Iterator for SideDataIter<'a> {
type Item = packet::SideData<'a>;
fn next(&mut self) -> Option<<Self as Iterator>::Item> {
unsafe {
if self.current >= (*self.stream.as_ptr()).nb_side_data {
return None;
}
self.current += 1;
Some(packet::SideData::wrap(
(*self.stream.as_ptr())
.side_data
.offset((self.current - 1) as isize),
))
}
}
fn size_hint(&self) -> (usize, Option<usize>) {
unsafe {
let length = (*self.stream.as_ptr()).nb_side_data as usize;
(
length - self.current as usize,
Some(length - self.current as usize),
)
}
}
}
impl<'a> ExactSizeIterator for SideDataIter<'a> {}
| {
unsafe { Rational::from((*self.as_ptr()).avg_frame_rate) }
} |
playback.go | package aurestplayback
import (
"context"
"encoding/json"
aurestclientapi "github.com/StephanHCB/go-autumn-restclient/api"
aurestrecorder "github.com/StephanHCB/go-autumn-restclient/implementation/recorder"
"os"
"strings"
"time"
)
type PlaybackImpl struct {
RecorderPath string
	// Now is exposed so tests can pin the time by overwriting this field
Now func() time.Time
}
// New builds a new http client simulator based on playback.
//
// Use this in your tests.
func New(recorderPath string) aurestclientapi.Client {
if recorderPath != "" {
if !strings.HasSuffix(recorderPath, "/") {
recorderPath += "/"
}
}
return &PlaybackImpl{
RecorderPath: recorderPath,
Now: time.Now,
}
}
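// Usage sketch (hypothetical recording directory, URL, and DTO):
//
//	client := New("testdata/recordings")
//	response := &aurestclientapi.ParsedResponse{Body: &someDto}
//	err := client.Perform(context.Background(), "GET", "https://example.com/v1/thing", nil, response)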
func (c *PlaybackImpl) Perform(_ context.Context, method string, requestUrl string, _ interface{}, response *aurestclientapi.ParsedResponse) error {
filename, err := aurestrecorder.ConstructFilename(method, requestUrl)
if err != nil {
return err
}
jsonBytes, err := os.ReadFile(c.RecorderPath + filename)
if err != nil {
return err
}
recording := aurestrecorder.RecorderData{}
err = json.Unmarshal(jsonBytes, &recording)
if err != nil {
return err
}
response.Header = recording.ParsedResponse.Header
response.Status = recording.ParsedResponse.Status
response.Time = c.Now()
// cannot just assign the body, need to re-parse into the existing pointer - using a json round trip
bodyJsonBytes, err := json.Marshal(recording.ParsedResponse.Body)
if err != nil {
return err
}
err = json.Unmarshal(bodyJsonBytes, response.Body)
if err != nil |
return recording.Error
}
| {
return err
} |
credential_secret.go | // Code generated by go-swagger; DO NOT EDIT.
package models
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command
import (
"github.com/go-openapi/errors"
"github.com/go-openapi/strfmt"
"github.com/go-openapi/swag"
"github.com/go-openapi/validate"
)
// CredentialSecret credential secret
//
// swagger:model CredentialSecret
type CredentialSecret struct {
// client id
// Format: uuid
ClientID strfmt.UUID `json:"client_id,omitempty"`
// client secret
ClientSecret string `json:"client_secret,omitempty"`
}
// Validate validates this credential secret
func (m *CredentialSecret) Validate(formats strfmt.Registry) error {
var res []error
if err := m.validateClientID(formats); err != nil {
res = append(res, err)
}
if len(res) > 0 {
return errors.CompositeValidationError(res...)
}
return nil
}
func (m *CredentialSecret) validateClientID(formats strfmt.Registry) error {
if swag.IsZero(m.ClientID) { // not required
return nil
}
if err := validate.FormatOf("client_id", "body", "uuid", m.ClientID.String(), formats); err != nil |
return nil
}
// MarshalBinary interface implementation
func (m *CredentialSecret) MarshalBinary() ([]byte, error) {
if m == nil {
return nil, nil
}
return swag.WriteJSON(m)
}
// UnmarshalBinary interface implementation
func (m *CredentialSecret) UnmarshalBinary(b []byte) error {
var res CredentialSecret
if err := swag.ReadJSON(b, &res); err != nil {
return err
}
*m = res
return nil
}
pprust.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use abi;
use ast::{P, RegionTyParamBound, TraitTyParamBound, Required, Provided};
use ast;
use ast_util;
use owned_slice::OwnedSlice;
use attr::{AttrMetaMethods, AttributeMethods};
use codemap::{CodeMap, BytePos};
use codemap;
use diagnostic;
use parse::classify::expr_is_simple_block;
use parse::token::IdentInterner;
use parse::{comments, token};
use parse;
use print::pp::{break_offset, word, space, zerobreak, hardbreak};
use print::pp::{Breaks, Consistent, Inconsistent, eof};
use print::pp;
use std::cast;
use std::char;
use std::io::{IoResult, MemWriter};
use std::io;
use std::rc::Rc;
use std::str;
use std::strbuf::StrBuf;
pub enum AnnNode<'a> {
NodeBlock(&'a ast::Block),
NodeItem(&'a ast::Item),
NodeExpr(&'a ast::Expr),
NodePat(&'a ast::Pat),
}
pub trait PpAnn {
fn pre(&self, _state: &mut State, _node: AnnNode) -> IoResult<()> { Ok(()) }
fn post(&self, _state: &mut State, _node: AnnNode) -> IoResult<()> { Ok(()) }
}
pub struct NoAnn;
impl PpAnn for NoAnn {}
pub struct CurrentCommentAndLiteral {
cur_cmnt: uint,
cur_lit: uint,
}
pub struct State<'a> {
pub s: pp::Printer,
cm: Option<&'a CodeMap>,
intr: Rc<token::IdentInterner>,
comments: Option<Vec<comments::Comment> >,
literals: Option<Vec<comments::Literal> >,
cur_cmnt_and_lit: CurrentCommentAndLiteral,
boxes: Vec<pp::Breaks>,
ann: &'a PpAnn
}
pub fn rust_printer(writer: ~io::Writer) -> State<'static> {
static NO_ANN: NoAnn = NoAnn;
rust_printer_annotated(writer, &NO_ANN)
}
pub fn rust_printer_annotated<'a>(writer: ~io::Writer,
ann: &'a PpAnn) -> State<'a> {
State {
s: pp::mk_printer(writer, default_columns),
cm: None,
intr: token::get_ident_interner(),
comments: None,
literals: None,
cur_cmnt_and_lit: CurrentCommentAndLiteral {
cur_cmnt: 0,
cur_lit: 0
},
boxes: Vec::new(),
ann: ann
}
}
pub static indent_unit: uint = 4u;
pub static default_columns: uint = 78u;
// Requires you to pass an input filename and reader so that
// it can scan the input text for comments and literals to
// copy forward.
pub fn print_crate<'a>(cm: &'a CodeMap,
span_diagnostic: &diagnostic::SpanHandler,
krate: &ast::Crate,
filename: ~str,
input: &mut io::Reader,
out: ~io::Writer,
ann: &'a PpAnn,
is_expanded: bool) -> IoResult<()> {
let (cmnts, lits) = comments::gather_comments_and_literals(
span_diagnostic,
filename,
input
);
let mut s = State {
s: pp::mk_printer(out, default_columns),
cm: Some(cm),
intr: token::get_ident_interner(),
comments: Some(cmnts),
// If the code is post expansion, don't use the table of
// literals, since it doesn't correspond with the literals
// in the AST anymore.
literals: if is_expanded {
None
} else {
Some(lits)
},
cur_cmnt_and_lit: CurrentCommentAndLiteral {
cur_cmnt: 0,
cur_lit: 0
},
boxes: Vec::new(),
ann: ann
};
try!(s.print_mod(&krate.module, krate.attrs.as_slice()));
try!(s.print_remaining_comments());
eof(&mut s.s)
}
pub fn to_str(f: |&mut State| -> IoResult<()>) -> ~str {
let mut s = rust_printer(~MemWriter::new());
f(&mut s).unwrap();
eof(&mut s.s).unwrap();
unsafe {
// FIXME(pcwalton): A nasty function to extract the string from an `io::Writer`
// that we "know" to be a `MemWriter` that works around the lack of checked
// downcasts.
let (_, wr): (uint, ~MemWriter) = cast::transmute_copy(&s.s.out);
let result = str::from_utf8_owned(wr.get_ref().to_owned()).unwrap();
cast::forget(wr);
result
}
}
pub fn ty_to_str(ty: &ast::Ty) -> ~str {
to_str(|s| s.print_type(ty))
}
pub fn pat_to_str(pat: &ast::Pat) -> ~str {
to_str(|s| s.print_pat(pat))
}
pub fn expr_to_str(e: &ast::Expr) -> ~str {
to_str(|s| s.print_expr(e))
}
pub fn lifetime_to_str(e: &ast::Lifetime) -> ~str {
to_str(|s| s.print_lifetime(e))
}
pub fn tt_to_str(tt: &ast::TokenTree) -> ~str {
to_str(|s| s.print_tt(tt))
}
pub fn tts_to_str(tts: &[ast::TokenTree]) -> ~str {
to_str(|s| s.print_tts(&tts))
}
pub fn stmt_to_str(stmt: &ast::Stmt) -> ~str {
to_str(|s| s.print_stmt(stmt))
}
pub fn item_to_str(i: &ast::Item) -> ~str {
to_str(|s| s.print_item(i))
}
pub fn generics_to_str(generics: &ast::Generics) -> ~str {
to_str(|s| s.print_generics(generics))
}
pub fn ty_method_to_str(p: &ast::TypeMethod) -> ~str {
to_str(|s| s.print_ty_method(p))
}
pub fn method_to_str(p: &ast::Method) -> ~str {
to_str(|s| s.print_method(p))
}
pub fn fn_block_to_str(p: &ast::FnDecl) -> ~str {
to_str(|s| s.print_fn_block_args(p))
}
pub fn path_to_str(p: &ast::Path) -> ~str {
to_str(|s| s.print_path(p, false))
}
pub fn fun_to_str(decl: &ast::FnDecl, fn_style: ast::FnStyle, name: ast::Ident,
opt_explicit_self: Option<ast::ExplicitSelf_>,
generics: &ast::Generics) -> ~str {
to_str(|s| {
try!(s.print_fn(decl, Some(fn_style), abi::Rust,
name, generics, opt_explicit_self, ast::Inherited));
try!(s.end()); // Close the head box
s.end() // Close the outer box
})
}
pub fn block_to_str(blk: &ast::Block) -> ~str {
to_str(|s| {
// containing cbox, will be closed by print-block at }
try!(s.cbox(indent_unit));
// head-ibox, will be closed by print-block after {
try!(s.ibox(0u));
s.print_block(blk)
})
}
pub fn meta_item_to_str(mi: &ast::MetaItem) -> ~str {
to_str(|s| s.print_meta_item(mi))
}
pub fn attribute_to_str(attr: &ast::Attribute) -> ~str {
to_str(|s| s.print_attribute(attr))
}
pub fn lit_to_str(l: &ast::Lit) -> ~str {
to_str(|s| s.print_literal(l))
}
pub fn explicit_self_to_str(explicit_self: ast::ExplicitSelf_) -> ~str {
to_str(|s| s.print_explicit_self(explicit_self, ast::MutImmutable).map(|_| {}))
}
pub fn variant_to_str(var: &ast::Variant) -> ~str {
to_str(|s| s.print_variant(var))
}
pub fn visibility_qualified(vis: ast::Visibility, s: &str) -> ~str {
match vis {
ast::Public => format!("pub {}", s),
ast::Inherited => s.to_owned()
}
}
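// Illustrative note (not part of the original file): every *_to_str helper
// above is a thin wrapper that hands a printing closure to `to_str`, which
// runs it against an in-memory `rust_printer` and extracts the accumulated
// text. See `test_fun_to_str` in the test module at the bottom of this file,
// where `fun_to_str(&decl, ast::NormalFn, abba_ident, None, &generics)`
// yields "fn abba()".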
impl<'a> State<'a> {
pub fn ibox(&mut self, u: uint) -> IoResult<()> {
self.boxes.push(pp::Inconsistent);
pp::ibox(&mut self.s, u)
}
pub fn end(&mut self) -> IoResult<()> {
self.boxes.pop().unwrap();
pp::end(&mut self.s)
}
pub fn cbox(&mut self, u: uint) -> IoResult<()> {
self.boxes.push(pp::Consistent);
pp::cbox(&mut self.s, u)
}
// "raw box"
pub fn rbox(&mut self, u: uint, b: pp::Breaks) -> IoResult<()> {
self.boxes.push(b);
pp::rbox(&mut self.s, u, b)
}
pub fn nbsp(&mut self) -> IoResult<()> { word(&mut self.s, " ") }
pub fn word_nbsp(&mut self, w: &str) -> IoResult<()> {
try!(word(&mut self.s, w));
self.nbsp()
}
pub fn word_space(&mut self, w: &str) -> IoResult<()> {
try!(word(&mut self.s, w));
space(&mut self.s)
}
pub fn popen(&mut self) -> IoResult<()> { word(&mut self.s, "(") }
pub fn pclose(&mut self) -> IoResult<()> { word(&mut self.s, ")") }
pub fn head(&mut self, w: &str) -> IoResult<()> {
// outer-box is consistent
try!(self.cbox(indent_unit));
// head-box is inconsistent
try!(self.ibox(w.len() + 1));
// keyword that starts the head
if !w.is_empty() {
try!(self.word_nbsp(w));
}
Ok(())
}
pub fn bopen(&mut self) -> IoResult<()> {
try!(word(&mut self.s, "{"));
self.end() // close the head-box
}
pub fn bclose_(&mut self, span: codemap::Span,
indented: uint) -> IoResult<()> {
self.bclose_maybe_open(span, indented, true)
}
pub fn bclose_maybe_open(&mut self, span: codemap::Span,
indented: uint, close_box: bool) -> IoResult<()> {
try!(self.maybe_print_comment(span.hi));
try!(self.break_offset_if_not_bol(1u, -(indented as int)));
try!(word(&mut self.s, "}"));
if close_box {
try!(self.end()); // close the outer-box
}
Ok(())
}
pub fn bclose(&mut self, span: codemap::Span) -> IoResult<()> {
self.bclose_(span, indent_unit)
}
pub fn is_begin(&mut self) -> bool {
match self.s.last_token() { pp::Begin(_) => true, _ => false }
}
pub fn is_end(&mut self) -> bool {
match self.s.last_token() { pp::End => true, _ => false }
}
pub fn is_bol(&mut self) -> bool {
self.s.last_token().is_eof() || self.s.last_token().is_hardbreak_tok()
}
pub fn in_cbox(&self) -> bool {
match self.boxes.last() {
Some(&last_box) => last_box == pp::Consistent,
None => false
}
}
pub fn hardbreak_if_not_bol(&mut self) -> IoResult<()> {
if !self.is_bol() {
try!(hardbreak(&mut self.s))
}
Ok(())
}
pub fn space_if_not_bol(&mut self) -> IoResult<()> {
if !self.is_bol() { try!(space(&mut self.s)); }
Ok(())
}
pub fn break_offset_if_not_bol(&mut self, n: uint,
off: int) -> IoResult<()> {
if !self.is_bol() {
break_offset(&mut self.s, n, off)
} else {
if off != 0 && self.s.last_token().is_hardbreak_tok() {
// We do something pretty sketchy here: tuck the nonzero
// offset-adjustment we were going to deposit along with the
// break into the previous hardbreak.
self.s.replace_last_token(pp::hardbreak_tok_offset(off));
}
Ok(())
}
}
// Synthesizes a comment that was not textually present in the original source
// file.
pub fn synth_comment(&mut self, text: ~str) -> IoResult<()> {
try!(word(&mut self.s, "/*"));
try!(space(&mut self.s));
try!(word(&mut self.s, text));
try!(space(&mut self.s));
word(&mut self.s, "*/")
}
pub fn commasep<T>(&mut self, b: Breaks, elts: &[T],
op: |&mut State, &T| -> IoResult<()>)
-> IoResult<()> {
try!(self.rbox(0u, b));
let mut first = true;
for elt in elts.iter() {
if first { first = false; } else { try!(self.word_space(",")); }
try!(op(self, elt));
}
self.end()
}
pub fn commasep_cmnt<T>(
&mut self,
b: Breaks,
elts: &[T],
op: |&mut State, &T| -> IoResult<()>,
get_span: |&T| -> codemap::Span) -> IoResult<()> {
try!(self.rbox(0u, b));
let len = elts.len();
let mut i = 0u;
for elt in elts.iter() {
try!(self.maybe_print_comment(get_span(elt).hi));
try!(op(self, elt));
i += 1u;
if i < len {
try!(word(&mut self.s, ","));
try!(self.maybe_print_trailing_comment(get_span(elt),
Some(get_span(&elts[i]).hi)));
try!(self.space_if_not_bol());
}
}
self.end()
}
pub fn commasep_exprs(&mut self, b: Breaks,
exprs: &[@ast::Expr]) -> IoResult<()> {
self.commasep_cmnt(b, exprs, |s, &e| s.print_expr(e), |e| e.span)
}
pub fn print_mod(&mut self, _mod: &ast::Mod,
attrs: &[ast::Attribute]) -> IoResult<()> {
try!(self.print_inner_attributes(attrs));
for vitem in _mod.view_items.iter() {
try!(self.print_view_item(vitem));
}
for item in _mod.items.iter() {
try!(self.print_item(*item));
}
Ok(())
}
pub fn print_foreign_mod(&mut self, nmod: &ast::ForeignMod,
attrs: &[ast::Attribute]) -> IoResult<()> {
try!(self.print_inner_attributes(attrs));
for vitem in nmod.view_items.iter() {
try!(self.print_view_item(vitem));
}
for item in nmod.items.iter() {
try!(self.print_foreign_item(*item));
}
Ok(())
}
pub fn print_opt_lifetime(&mut self,
lifetime: &Option<ast::Lifetime>) -> IoResult<()> {
for l in lifetime.iter() {
try!(self.print_lifetime(l));
try!(self.nbsp());
}
Ok(())
}
pub fn print_type(&mut self, ty: &ast::Ty) -> IoResult<()> {
try!(self.maybe_print_comment(ty.span.lo));
try!(self.ibox(0u));
match ty.node {
ast::TyNil => try!(word(&mut self.s, "()")),
ast::TyBot => try!(word(&mut self.s, "!")),
ast::TyBox(ty) => {
try!(word(&mut self.s, "@"));
try!(self.print_type(ty));
}
ast::TyUniq(ty) => {
try!(word(&mut self.s, "~"));
try!(self.print_type(ty));
}
ast::TyVec(ty) => {
try!(word(&mut self.s, "["));
try!(self.print_type(ty));
try!(word(&mut self.s, "]"));
}
ast::TyPtr(ref mt) => {
try!(word(&mut self.s, "*"));
try!(self.print_mt(mt));
}
ast::TyRptr(ref lifetime, ref mt) => {
try!(word(&mut self.s, "&"));
try!(self.print_opt_lifetime(lifetime));
try!(self.print_mt(mt));
}
ast::TyTup(ref elts) => {
try!(self.popen());
try!(self.commasep(Inconsistent, elts.as_slice(),
|s, ty| s.print_type_ref(ty)));
if elts.len() == 1 {
try!(word(&mut self.s, ","));
}
try!(self.pclose());
}
ast::TyBareFn(f) => {
let generics = ast::Generics {
lifetimes: f.lifetimes.clone(),
ty_params: OwnedSlice::empty()
};
try!(self.print_ty_fn(Some(f.abi), None, &None,
f.fn_style, ast::Many, f.decl, None, &None,
Some(&generics), None));
}
ast::TyClosure(f, ref region) => {
let generics = ast::Generics {
lifetimes: f.lifetimes.clone(),
ty_params: OwnedSlice::empty()
};
try!(self.print_ty_fn(None, Some('&'), region, f.fn_style,
f.onceness, f.decl, None, &f.bounds,
Some(&generics), None));
}
ast::TyProc(f) => {
let generics = ast::Generics {
lifetimes: f.lifetimes.clone(),
ty_params: OwnedSlice::empty()
};
try!(self.print_ty_fn(None, Some('~'), &None, f.fn_style,
f.onceness, f.decl, None, &f.bounds,
Some(&generics), None));
}
ast::TyPath(ref path, ref bounds, _) => {
try!(self.print_bounded_path(path, bounds));
}
ast::TyFixedLengthVec(ty, v) => {
try!(word(&mut self.s, "["));
try!(self.print_type(ty));
try!(word(&mut self.s, ", .."));
try!(self.print_expr(v));
try!(word(&mut self.s, "]"));
}
ast::TyTypeof(e) => {
try!(word(&mut self.s, "typeof("));
try!(self.print_expr(e));
try!(word(&mut self.s, ")"));
}
ast::TyInfer => {
try!(word(&mut self.s, "_"));
}
}
self.end()
}
pub fn print_type_ref(&mut self, ty: &P<ast::Ty>) -> IoResult<()> {
self.print_type(*ty)
}
pub fn print_foreign_item(&mut self,
item: &ast::ForeignItem) -> IoResult<()> {
try!(self.hardbreak_if_not_bol());
try!(self.maybe_print_comment(item.span.lo));
try!(self.print_outer_attributes(item.attrs.as_slice()));
match item.node {
ast::ForeignItemFn(decl, ref generics) => {
try!(self.print_fn(decl, None, abi::Rust, item.ident, generics,
None, item.vis));
try!(self.end()); // end head-ibox
try!(word(&mut self.s, ";"));
self.end() // end the outer fn box
}
ast::ForeignItemStatic(t, m) => {
try!(self.head(visibility_qualified(item.vis, "static")));
if m {
try!(self.word_space("mut"));
}
try!(self.print_ident(item.ident));
try!(self.word_space(":"));
try!(self.print_type(t));
try!(word(&mut self.s, ";"));
try!(self.end()); // end the head-ibox
self.end() // end the outer cbox
}
}
}
pub fn print_item(&mut self, item: &ast::Item) -> IoResult<()> {
try!(self.hardbreak_if_not_bol());
try!(self.maybe_print_comment(item.span.lo));
try!(self.print_outer_attributes(item.attrs.as_slice()));
try!(self.ann.pre(self, NodeItem(item)));
match item.node {
ast::ItemStatic(ty, m, expr) => {
try!(self.head(visibility_qualified(item.vis, "static")));
if m == ast::MutMutable {
try!(self.word_space("mut"));
}
try!(self.print_ident(item.ident));
try!(self.word_space(":"));
try!(self.print_type(ty));
try!(space(&mut self.s));
try!(self.end()); // end the head-ibox
try!(self.word_space("="));
try!(self.print_expr(expr));
try!(word(&mut self.s, ";"));
try!(self.end()); // end the outer cbox
}
ast::ItemFn(decl, fn_style, abi, ref typarams, body) => {
try!(self.print_fn(
decl,
Some(fn_style),
abi,
item.ident,
typarams,
None,
item.vis
));
try!(word(&mut self.s, " "));
try!(self.print_block_with_attrs(body, item.attrs.as_slice()));
}
ast::ItemMod(ref _mod) => {
try!(self.head(visibility_qualified(item.vis, "mod")));
try!(self.print_ident(item.ident));
try!(self.nbsp());
try!(self.bopen());
try!(self.print_mod(_mod, item.attrs.as_slice()));
try!(self.bclose(item.span));
}
ast::ItemForeignMod(ref nmod) => {
try!(self.head("extern"));
try!(self.word_nbsp(nmod.abi.to_str()));
try!(self.bopen());
try!(self.print_foreign_mod(nmod, item.attrs.as_slice()));
try!(self.bclose(item.span));
}
ast::ItemTy(ty, ref params) => {
try!(self.ibox(indent_unit));
try!(self.ibox(0u));
try!(self.word_nbsp(visibility_qualified(item.vis, "type")));
try!(self.print_ident(item.ident));
try!(self.print_generics(params));
try!(self.end()); // end the inner ibox
try!(space(&mut self.s));
try!(self.word_space("="));
try!(self.print_type(ty));
try!(word(&mut self.s, ";"));
try!(self.end()); // end the outer ibox
}
ast::ItemEnum(ref enum_definition, ref params) => {
try!(self.print_enum_def(
enum_definition,
params,
item.ident,
item.span,
item.vis
));
}
ast::ItemStruct(struct_def, ref generics) => {
if struct_def.is_virtual {
try!(self.word_space("virtual"));
}
try!(self.head(visibility_qualified(item.vis, "struct")));
try!(self.print_struct(struct_def, generics, item.ident, item.span));
}
ast::ItemImpl(ref generics, ref opt_trait, ty, ref methods) => {
try!(self.head(visibility_qualified(item.vis, "impl")));
if generics.is_parameterized() {
try!(self.print_generics(generics));
try!(space(&mut self.s));
}
match opt_trait {
&Some(ref t) => {
try!(self.print_trait_ref(t));
try!(space(&mut self.s));
try!(self.word_space("for"));
}
&None => {}
}
try!(self.print_type(ty));
try!(space(&mut self.s));
try!(self.bopen());
try!(self.print_inner_attributes(item.attrs.as_slice()));
for meth in methods.iter() {
try!(self.print_method(*meth));
}
try!(self.bclose(item.span));
}
ast::ItemTrait(ref generics, ref sized, ref traits, ref methods) => {
try!(self.head(visibility_qualified(item.vis, "trait")));
try!(self.print_ident(item.ident));
try!(self.print_generics(generics));
if *sized == ast::DynSize {
try!(space(&mut self.s));
try!(word(&mut self.s, "for type"));
}
if traits.len() != 0u {
try!(word(&mut self.s, ":"));
for (i, trait_) in traits.iter().enumerate() {
try!(self.nbsp());
if i != 0 {
try!(self.word_space("+"));
}
try!(self.print_path(&trait_.path, false));
}
}
try!(word(&mut self.s, " "));
try!(self.bopen());
for meth in methods.iter() {
try!(self.print_trait_method(meth));
}
try!(self.bclose(item.span));
}
// I think it's reasonable to hide the context here:
ast::ItemMac(codemap::Spanned { node: ast::MacInvocTT(ref pth, ref tts, _),
..}) => {
try!(self.print_visibility(item.vis));
try!(self.print_path(pth, false));
try!(word(&mut self.s, "! "));
try!(self.print_ident(item.ident));
try!(self.cbox(indent_unit));
try!(self.popen());
try!(self.print_tts(&(tts.as_slice())));
try!(self.pclose());
try!(self.end());
}
}
self.ann.post(self, NodeItem(item))
}
fn print_trait_ref(&mut self, t: &ast::TraitRef) -> IoResult<()> {
self.print_path(&t.path, false)
}
pub fn print_enum_def(&mut self, enum_definition: &ast::EnumDef,
generics: &ast::Generics, ident: ast::Ident,
span: codemap::Span,
visibility: ast::Visibility) -> IoResult<()> {
try!(self.head(visibility_qualified(visibility, "enum")));
try!(self.print_ident(ident));
try!(self.print_generics(generics));
try!(space(&mut self.s));
self.print_variants(enum_definition.variants.as_slice(), span)
}
pub fn print_variants(&mut self,
variants: &[P<ast::Variant>],
span: codemap::Span) -> IoResult<()> {
try!(self.bopen());
for &v in variants.iter() {
try!(self.space_if_not_bol());
try!(self.maybe_print_comment(v.span.lo));
try!(self.print_outer_attributes(v.node.attrs.as_slice()));
try!(self.ibox(indent_unit));
try!(self.print_variant(v));
try!(word(&mut self.s, ","));
try!(self.end());
try!(self.maybe_print_trailing_comment(v.span, None));
}
self.bclose(span)
}
pub fn print_visibility(&mut self, vis: ast::Visibility) -> IoResult<()> {
match vis {
ast::Public => self.word_nbsp("pub"),
ast::Inherited => Ok(())
}
}
pub fn print_struct(&mut self,
struct_def: &ast::StructDef,
generics: &ast::Generics,
ident: ast::Ident,
span: codemap::Span) -> IoResult<()> {
try!(self.print_ident(ident));
try!(self.print_generics(generics));
match struct_def.super_struct {
Some(t) => {
try!(self.word_space(":"));
try!(self.print_type(t));
},
None => {},
}
if ast_util::struct_def_is_tuple_like(struct_def) {
if !struct_def.fields.is_empty() {
try!(self.popen());
try!(self.commasep(
Inconsistent, struct_def.fields.as_slice(),
|s, field| {
match field.node.kind {
ast::NamedField(..) => fail!("unexpected named field"),
ast::UnnamedField(vis) => {
try!(s.print_visibility(vis));
try!(s.maybe_print_comment(field.span.lo));
s.print_type(field.node.ty)
}
}
}
));
try!(self.pclose());
}
try!(word(&mut self.s, ";"));
try!(self.end());
self.end() // close the outer-box
} else {
try!(self.nbsp());
try!(self.bopen());
try!(self.hardbreak_if_not_bol());
for field in struct_def.fields.iter() {
match field.node.kind {
ast::UnnamedField(..) => fail!("unexpected unnamed field"),
ast::NamedField(ident, visibility) => {
try!(self.hardbreak_if_not_bol());
try!(self.maybe_print_comment(field.span.lo));
try!(self.print_outer_attributes(field.node.attrs.as_slice()));
try!(self.print_visibility(visibility));
try!(self.print_ident(ident));
try!(self.word_nbsp(":"));
try!(self.print_type(field.node.ty));
try!(word(&mut self.s, ","));
}
}
}
self.bclose(span)
}
}
/// This doesn't deserve to be called "pretty" printing, but it should be
/// meaning-preserving. A quick hack that might help would be to look at the
/// spans embedded in the TTs to decide where to put spaces and newlines.
/// But it'd be better to parse these according to the grammar of the
/// appropriate macro, transcribe back into the grammar we just parsed from,
/// and then pretty-print the resulting AST nodes (so, e.g., we print
/// expression arguments as expressions). It can be done! I think.
pub fn print_tt(&mut self, tt: &ast::TokenTree) -> IoResult<()> {
match *tt {
ast::TTDelim(ref tts) => self.print_tts(&(tts.as_slice())),
ast::TTTok(_, ref tk) => {
word(&mut self.s, parse::token::to_str(tk))
}
ast::TTSeq(_, ref tts, ref sep, zerok) => {
try!(word(&mut self.s, "$("));
for tt_elt in (*tts).iter() {
try!(self.print_tt(tt_elt));
}
try!(word(&mut self.s, ")"));
match *sep {
Some(ref tk) => {
try!(word(&mut self.s, parse::token::to_str(tk)));
}
None => ()
}
word(&mut self.s, if zerok { "*" } else { "+" })
}
ast::TTNonterminal(_, name) => {
try!(word(&mut self.s, "$"));
self.print_ident(name)
}
}
}
pub fn print_tts(&mut self, tts: & &[ast::TokenTree]) -> IoResult<()> {
try!(self.ibox(0));
for (i, tt) in tts.iter().enumerate() {
if i != 0 {
try!(space(&mut self.s));
}
try!(self.print_tt(tt));
}
self.end()
}
pub fn print_variant(&mut self, v: &ast::Variant) -> IoResult<()> {
try!(self.print_visibility(v.node.vis));
match v.node.kind {
ast::TupleVariantKind(ref args) => {
try!(self.print_ident(v.node.name));
if !args.is_empty() {
try!(self.popen());
try!(self.commasep(Consistent,
args.as_slice(),
|s, arg| s.print_type(arg.ty)));
try!(self.pclose());
}
}
ast::StructVariantKind(struct_def) => {
try!(self.head(""));
let generics = ast_util::empty_generics();
try!(self.print_struct(struct_def, &generics, v.node.name, v.span));
}
}
match v.node.disr_expr {
Some(d) => {
try!(space(&mut self.s));
try!(self.word_space("="));
self.print_expr(d)
}
_ => Ok(())
}
}
pub fn print_ty_method(&mut self, m: &ast::TypeMethod) -> IoResult<()> {
try!(self.hardbreak_if_not_bol());
try!(self.maybe_print_comment(m.span.lo));
try!(self.print_outer_attributes(m.attrs.as_slice()));
try!(self.print_ty_fn(None,
None,
&None,
m.fn_style,
ast::Many,
m.decl,
Some(m.ident),
&None,
Some(&m.generics),
Some(m.explicit_self.node)));
word(&mut self.s, ";")
}
pub fn print_trait_method(&mut self,
m: &ast::TraitMethod) -> IoResult<()> {
match *m {
Required(ref ty_m) => self.print_ty_method(ty_m),
Provided(m) => self.print_method(m)
}
}
pub fn print_method(&mut self, meth: &ast::Method) -> IoResult<()> {
try!(self.hardbreak_if_not_bol());
try!(self.maybe_print_comment(meth.span.lo));
try!(self.print_outer_attributes(meth.attrs.as_slice()));
try!(self.print_fn(meth.decl, Some(meth.fn_style), abi::Rust,
meth.ident, &meth.generics, Some(meth.explicit_self.node),
meth.vis));
try!(word(&mut self.s, " "));
self.print_block_with_attrs(meth.body, meth.attrs.as_slice())
}
pub fn print_outer_attributes(&mut self,
attrs: &[ast::Attribute]) -> IoResult<()> {
let mut count = 0;
for attr in attrs.iter() {
match attr.node.style {
ast::AttrOuter => {
try!(self.print_attribute(attr));
count += 1;
}
_ => {/* fallthrough */ }
}
}
if count > 0 {
try!(self.hardbreak_if_not_bol());
}
Ok(())
}
pub fn print_inner_attributes(&mut self,
attrs: &[ast::Attribute]) -> IoResult<()> {
let mut count = 0;
for attr in attrs.iter() {
match attr.node.style {
ast::AttrInner => {
try!(self.print_attribute(attr));
count += 1;
}
_ => {/* fallthrough */ }
}
}
if count > 0 {
try!(self.hardbreak_if_not_bol());
}
Ok(())
}
pub fn print_attribute(&mut self, attr: &ast::Attribute) -> IoResult<()> {
try!(self.hardbreak_if_not_bol());
try!(self.maybe_print_comment(attr.span.lo));
if attr.node.is_sugared_doc {
word(&mut self.s, attr.value_str().unwrap().get())
} else {
match attr.node.style {
ast::AttrInner => try!(word(&mut self.s, "#![")),
ast::AttrOuter => try!(word(&mut self.s, "#[")),
}
try!(self.print_meta_item(attr.meta()));
word(&mut self.s, "]")
}
}
pub fn print_stmt(&mut self, st: &ast::Stmt) -> IoResult<()> {
try!(self.maybe_print_comment(st.span.lo));
match st.node {
ast::StmtDecl(decl, _) => {
try!(self.print_decl(decl));
}
ast::StmtExpr(expr, _) => {
try!(self.space_if_not_bol());
try!(self.print_expr(expr));
}
ast::StmtSemi(expr, _) => {
try!(self.space_if_not_bol());
try!(self.print_expr(expr));
try!(word(&mut self.s, ";"));
}
ast::StmtMac(ref mac, semi) => {
try!(self.space_if_not_bol());
try!(self.print_mac(mac));
if semi {
try!(word(&mut self.s, ";"));
}
}
}
if parse::classify::stmt_ends_with_semi(st) {
try!(word(&mut self.s, ";"));
}
self.maybe_print_trailing_comment(st.span, None)
}
pub fn print_block(&mut self, blk: &ast::Block) -> IoResult<()> {
self.print_block_with_attrs(blk, &[])
}
pub fn print_block_unclosed(&mut self, blk: &ast::Block) -> IoResult<()> {
self.print_block_unclosed_indent(blk, indent_unit)
}
pub fn print_block_unclosed_indent(&mut self, blk: &ast::Block,
indented: uint) -> IoResult<()> {
self.print_block_maybe_unclosed(blk, indented, &[], false)
}
pub fn print_block_with_attrs(&mut self,
blk: &ast::Block,
attrs: &[ast::Attribute]) -> IoResult<()> {
self.print_block_maybe_unclosed(blk, indent_unit, attrs, true)
}
pub fn print_block_maybe_unclosed(&mut self,
blk: &ast::Block,
indented: uint,
attrs: &[ast::Attribute],
close_box: bool) -> IoResult<()> {
match blk.rules {
ast::UnsafeBlock(..) => try!(self.word_space("unsafe")),
ast::DefaultBlock => ()
}
try!(self.maybe_print_comment(blk.span.lo));
try!(self.ann.pre(self, NodeBlock(blk)));
try!(self.bopen());
try!(self.print_inner_attributes(attrs));
for vi in blk.view_items.iter() {
try!(self.print_view_item(vi));
}
for st in blk.stmts.iter() {
try!(self.print_stmt(*st));
}
match blk.expr {
Some(expr) => {
try!(self.space_if_not_bol());
try!(self.print_expr(expr));
try!(self.maybe_print_trailing_comment(expr.span, Some(blk.span.hi)));
}
_ => ()
}
try!(self.bclose_maybe_open(blk.span, indented, close_box));
self.ann.post(self, NodeBlock(blk))
}
fn print_else(&mut self, els: Option<@ast::Expr>) -> IoResult<()> {
match els {
Some(_else) => {
match _else.node {
// "another else-if"
ast::ExprIf(i, t, e) => {
try!(self.cbox(indent_unit - 1u));
try!(self.ibox(0u));
try!(word(&mut self.s, " else if "));
try!(self.print_expr(i));
try!(space(&mut self.s));
try!(self.print_block(t));
self.print_else(e)
}
// "final else"
ast::ExprBlock(b) => {
try!(self.cbox(indent_unit - 1u));
try!(self.ibox(0u));
try!(word(&mut self.s, " else "));
self.print_block(b)
}
// BLEAH, constraints would be great here
_ => {
fail!("print_if saw if with weird alternative");
}
}
}
_ => Ok(())
}
}
pub fn print_if(&mut self, test: &ast::Expr, blk: &ast::Block,
elseopt: Option<@ast::Expr>, chk: bool) -> IoResult<()> {
try!(self.head("if"));
if chk { try!(self.word_nbsp("check")); }
try!(self.print_expr(test));
try!(space(&mut self.s));
try!(self.print_block(blk));
self.print_else(elseopt)
}
pub fn print_mac(&mut self, m: &ast::Mac) -> IoResult<()> {
match m.node {
// I think it's reasonable to hide the ctxt here:
ast::MacInvocTT(ref pth, ref tts, _) => {
try!(self.print_path(pth, false));
try!(word(&mut self.s, "!"));
try!(self.popen());
try!(self.print_tts(&tts.as_slice()));
self.pclose()
}
}
}
pub fn print_expr_vstore(&mut self, t: ast::ExprVstore) -> IoResult<()> {
match t {
ast::ExprVstoreUniq => word(&mut self.s, "~"),
ast::ExprVstoreSlice => word(&mut self.s, "&"),
ast::ExprVstoreMutSlice => {
try!(word(&mut self.s, "&"));
word(&mut self.s, "mut")
}
}
}
fn print_call_post(&mut self, args: &[@ast::Expr]) -> IoResult<()> {
try!(self.popen());
try!(self.commasep_exprs(Inconsistent, args));
self.pclose()
}
pub fn print_expr(&mut self, expr: &ast::Expr) -> IoResult<()> {
try!(self.maybe_print_comment(expr.span.lo));
try!(self.ibox(indent_unit));
try!(self.ann.pre(self, NodeExpr(expr)));
match expr.node {
ast::ExprVstore(e, v) => {
try!(self.print_expr_vstore(v));
try!(self.print_expr(e));
},
ast::ExprBox(p, e) => {
try!(word(&mut self.s, "box"));
try!(word(&mut self.s, "("));
try!(self.print_expr(p));
try!(self.word_space(")"));
try!(self.print_expr(e));
}
ast::ExprVec(ref exprs) => {
try!(self.ibox(indent_unit));
try!(word(&mut self.s, "["));
try!(self.commasep_exprs(Inconsistent, exprs.as_slice()));
try!(word(&mut self.s, "]"));
try!(self.end());
}
ast::ExprRepeat(element, count) => {
try!(self.ibox(indent_unit));
try!(word(&mut self.s, "["));
try!(self.print_expr(element));
try!(word(&mut self.s, ","));
try!(word(&mut self.s, ".."));
try!(self.print_expr(count));
try!(word(&mut self.s, "]"));
try!(self.end());
}
ast::ExprStruct(ref path, ref fields, wth) => {
try!(self.print_path(path, true));
try!(word(&mut self.s, "{"));
try!(self.commasep_cmnt(
Consistent,
fields.as_slice(),
|s, field| {
try!(s.ibox(indent_unit));
try!(s.print_ident(field.ident.node));
try!(s.word_space(":"));
try!(s.print_expr(field.expr));
s.end()
},
|f| f.span));
match wth {
Some(expr) => {
try!(self.ibox(indent_unit));
if !fields.is_empty() {
try!(word(&mut self.s, ","));
try!(space(&mut self.s));
}
try!(word(&mut self.s, ".."));
try!(self.print_expr(expr));
try!(self.end());
}
_ => try!(word(&mut self.s, ","))
}
try!(word(&mut self.s, "}"));
}
ast::ExprTup(ref exprs) => {
try!(self.popen());
try!(self.commasep_exprs(Inconsistent, exprs.as_slice()));
if exprs.len() == 1 {
try!(word(&mut self.s, ","));
}
try!(self.pclose());
}
ast::ExprCall(func, ref args) => {
try!(self.print_expr(func));
try!(self.print_call_post(args.as_slice()));
}
ast::ExprMethodCall(ident, ref tys, ref args) => {
let base_args = args.slice_from(1);
try!(self.print_expr(*args.get(0)));
try!(word(&mut self.s, "."));
try!(self.print_ident(ident));
if tys.len() > 0u {
try!(word(&mut self.s, "::<"));
try!(self.commasep(Inconsistent, tys.as_slice(),
|s, ty| s.print_type_ref(ty)));
try!(word(&mut self.s, ">"));
}
try!(self.print_call_post(base_args));
}
ast::ExprBinary(op, lhs, rhs) => {
try!(self.print_expr(lhs));
try!(space(&mut self.s));
try!(self.word_space(ast_util::binop_to_str(op)));
try!(self.print_expr(rhs));
}
ast::ExprUnary(op, expr) => {
try!(word(&mut self.s, ast_util::unop_to_str(op)));
try!(self.print_expr(expr));
}
ast::ExprAddrOf(m, expr) => {
try!(word(&mut self.s, "&"));
try!(self.print_mutability(m));
// Avoid `& &e` => `&&e`.
match (m, &expr.node) {
(ast::MutImmutable, &ast::ExprAddrOf(..)) => try!(space(&mut self.s)),
_ => { }
}
try!(self.print_expr(expr));
}
ast::ExprLit(lit) => try!(self.print_literal(lit)),
ast::ExprCast(expr, ty) => {
try!(self.print_expr(expr));
try!(space(&mut self.s));
try!(self.word_space("as"));
try!(self.print_type(ty));
}
ast::ExprIf(test, blk, elseopt) => {
try!(self.print_if(test, blk, elseopt, false));
}
ast::ExprWhile(test, blk) => {
try!(self.head("while"));
try!(self.print_expr(test));
try!(space(&mut self.s));
try!(self.print_block(blk));
}
ast::ExprForLoop(pat, iter, blk, opt_ident) => {
for ident in opt_ident.iter() {
try!(word(&mut self.s, "'"));
try!(self.print_ident(*ident));
try!(self.word_space(":"));
}
try!(self.head("for"));
try!(self.print_pat(pat));
try!(space(&mut self.s));
try!(self.word_space("in"));
try!(self.print_expr(iter));
try!(space(&mut self.s));
try!(self.print_block(blk));
}
ast::ExprLoop(blk, opt_ident) => {
for ident in opt_ident.iter() {
try!(word(&mut self.s, "'"));
try!(self.print_ident(*ident));
try!(self.word_space(":"));
}
try!(self.head("loop"));
try!(space(&mut self.s));
try!(self.print_block(blk));
}
ast::ExprMatch(expr, ref arms) => {
try!(self.cbox(indent_unit));
try!(self.ibox(4));
try!(self.word_nbsp("match"));
try!(self.print_expr(expr));
try!(space(&mut self.s));
try!(self.bopen());
let len = arms.len();
for (i, arm) in arms.iter().enumerate() {
try!(space(&mut self.s));
try!(self.cbox(indent_unit));
try!(self.ibox(0u));
let mut first = true;
for p in arm.pats.iter() {
if first {
first = false;
} else {
try!(space(&mut self.s));
try!(self.word_space("|"));
}
try!(self.print_pat(*p));
}
try!(space(&mut self.s));
match arm.guard {
Some(e) => {
try!(self.word_space("if"));
try!(self.print_expr(e));
try!(space(&mut self.s));
}
None => ()
}
try!(self.word_space("=>"));
match arm.body.node {
ast::ExprBlock(blk) => {
// the block will close the pattern's ibox
try!(self.print_block_unclosed_indent(blk, indent_unit));
}
_ => {
try!(self.end()); // close the ibox for the pattern
try!(self.print_expr(arm.body));
}
}
if !expr_is_simple_block(expr)
&& i < len - 1 {
try!(word(&mut self.s, ","));
}
try!(self.end()); // close enclosing cbox
}
try!(self.bclose_(expr.span, indent_unit));
}
ast::ExprFnBlock(decl, body) => {
// in do/for blocks we don't want to show an empty
// argument list, but at this point we don't know which
// we are inside.
//
// if !decl.inputs.is_empty() {
try!(self.print_fn_block_args(decl));
try!(space(&mut self.s));
// }
if !body.stmts.is_empty() || !body.expr.is_some() {
try!(self.print_block_unclosed(body));
} else {
// we extract the block, so as not to create another set of boxes
match body.expr.unwrap().node {
ast::ExprBlock(blk) => {
try!(self.print_block_unclosed(blk));
}
_ => {
// this is a bare expression
try!(self.print_expr(body.expr.unwrap()));
try!(self.end()); // need to close a box
}
}
}
// a box will be closed by print_expr, but we didn't want an overall
// wrapper so we closed the corresponding opening. so create an
// empty box to satisfy the close.
try!(self.ibox(0));
}
ast::ExprProc(decl, body) => {
// in do/for blocks we don't want to show an empty
// argument list, but at this point we don't know which
// we are inside.
//
// if !decl.inputs.is_empty() {
try!(self.print_proc_args(decl));
try!(space(&mut self.s));
// }
assert!(body.stmts.is_empty());
assert!(body.expr.is_some());
// we extract the block, so as not to create another set of boxes
match body.expr.unwrap().node {
ast::ExprBlock(blk) => {
try!(self.print_block_unclosed(blk));
}
_ => {
// this is a bare expression
try!(self.print_expr(body.expr.unwrap()));
try!(self.end()); // need to close a box
}
}
// a box will be closed by print_expr, but we didn't want an overall
// wrapper so we closed the corresponding opening. so create an
// empty box to satisfy the close.
try!(self.ibox(0));
}
ast::ExprBlock(blk) => {
// containing cbox, will be closed by print-block at }
try!(self.cbox(indent_unit));
// head-box, will be closed by print-block after {
try!(self.ibox(0u));
try!(self.print_block(blk));
}
ast::ExprAssign(lhs, rhs) => {
try!(self.print_expr(lhs));
try!(space(&mut self.s));
try!(self.word_space("="));
try!(self.print_expr(rhs));
}
ast::ExprAssignOp(op, lhs, rhs) => {
try!(self.print_expr(lhs));
try!(space(&mut self.s));
try!(word(&mut self.s, ast_util::binop_to_str(op)));
try!(self.word_space("="));
try!(self.print_expr(rhs));
}
ast::ExprField(expr, id, ref tys) => {
try!(self.print_expr(expr));
try!(word(&mut self.s, "."));
try!(self.print_ident(id));
if tys.len() > 0u {
try!(word(&mut self.s, "::<"));
try!(self.commasep(
Inconsistent, tys.as_slice(),
|s, ty| s.print_type_ref(ty)));
try!(word(&mut self.s, ">"));
}
}
ast::ExprIndex(expr, index) => {
try!(self.print_expr(expr));
try!(word(&mut self.s, "["));
try!(self.print_expr(index));
try!(word(&mut self.s, "]"));
}
ast::ExprPath(ref path) => try!(self.print_path(path, true)),
ast::ExprBreak(opt_ident) => {
try!(word(&mut self.s, "break"));
try!(space(&mut self.s));
for ident in opt_ident.iter() {
try!(word(&mut self.s, "'"));
try!(self.print_ident(*ident));
try!(space(&mut self.s));
}
}
ast::ExprAgain(opt_ident) => {
try!(word(&mut self.s, "continue"));
try!(space(&mut self.s));
for ident in opt_ident.iter() {
try!(word(&mut self.s, "'"));
try!(self.print_ident(*ident));
try!(space(&mut self.s))
}
}
ast::ExprRet(result) => {
try!(word(&mut self.s, "return"));
match result {
Some(expr) => {
try!(word(&mut self.s, " "));
try!(self.print_expr(expr));
}
_ => ()
}
}
ast::ExprInlineAsm(ref a) => {
if a.volatile {
try!(word(&mut self.s, "__volatile__ asm!"));
} else {
try!(word(&mut self.s, "asm!"));
}
try!(self.popen());
try!(self.print_string(a.asm.get(), a.asm_str_style));
try!(self.word_space(":"));
for &(ref co, o) in a.outputs.iter() {
try!(self.print_string(co.get(), ast::CookedStr));
try!(self.popen());
try!(self.print_expr(o));
try!(self.pclose());
try!(self.word_space(","));
}
try!(self.word_space(":"));
for &(ref co, o) in a.inputs.iter() {
try!(self.print_string(co.get(), ast::CookedStr));
try!(self.popen());
try!(self.print_expr(o));
try!(self.pclose());
try!(self.word_space(","));
}
try!(self.word_space(":"));
try!(self.print_string(a.clobbers.get(), ast::CookedStr));
try!(self.pclose());
}
ast::ExprMac(ref m) => try!(self.print_mac(m)),
ast::ExprParen(e) => {
try!(self.popen());
try!(self.print_expr(e));
try!(self.pclose());
}
}
try!(self.ann.post(self, NodeExpr(expr)));
self.end()
}
pub fn print_local_decl(&mut self, loc: &ast::Local) -> IoResult<()> {
try!(self.print_pat(loc.pat));
match loc.ty.node {
ast::TyInfer => Ok(()),
_ => {
try!(self.word_space(":"));
self.print_type(loc.ty)
}
}
}
pub fn print_decl(&mut self, decl: &ast::Decl) -> IoResult<()> {
try!(self.maybe_print_comment(decl.span.lo));
match decl.node {
ast::DeclLocal(loc) => {
try!(self.space_if_not_bol());
try!(self.ibox(indent_unit));
try!(self.word_nbsp("let"));
try!(self.ibox(indent_unit));
try!(self.print_local_decl(loc));
try!(self.end());
match loc.init {
Some(init) => {
try!(self.nbsp());
try!(self.word_space("="));
try!(self.print_expr(init));
}
_ => {}
}
self.end()
}
ast::DeclItem(item) => self.print_item(item)
}
}
pub fn print_ident(&mut self, ident: ast::Ident) -> IoResult<()> {
word(&mut self.s, token::get_ident(ident).get())
}
pub fn print_name(&mut self, name: ast::Name) -> IoResult<()> {
word(&mut self.s, token::get_name(name).get())
}
pub fn print_for_decl(&mut self, loc: &ast::Local,
coll: &ast::Expr) -> IoResult<()> {
try!(self.print_local_decl(loc));
try!(space(&mut self.s));
try!(self.word_space("in"));
self.print_expr(coll)
}
fn print_path_(&mut self,
path: &ast::Path,
colons_before_params: bool,
opt_bounds: &Option<OwnedSlice<ast::TyParamBound>>)
-> IoResult<()> {
try!(self.maybe_print_comment(path.span.lo));
if path.global {
try!(word(&mut self.s, "::"));
}
let mut first = true;
for segment in path.segments.iter() {
if first {
first = false
} else {
try!(word(&mut self.s, "::"))
}
try!(self.print_ident(segment.identifier));
if !segment.lifetimes.is_empty() || !segment.types.is_empty() {
if colons_before_params {
try!(word(&mut self.s, "::"))
}
try!(word(&mut self.s, "<"));
let mut comma = false;
for lifetime in segment.lifetimes.iter() {
if comma {
try!(self.word_space(","))
}
try!(self.print_lifetime(lifetime));
comma = true;
}
if !segment.types.is_empty() {
if comma {
try!(self.word_space(","))
}
try!(self.commasep(
Inconsistent,
segment.types.as_slice(),
|s, ty| s.print_type_ref(ty)));
}
try!(word(&mut self.s, ">"))
}
}
match *opt_bounds {
None => Ok(()),
Some(ref bounds) => self.print_bounds(&None, bounds, true),
}
}
fn print_path(&mut self, path: &ast::Path,
colons_before_params: bool) -> IoResult<()> {
self.print_path_(path, colons_before_params, &None)
}
fn print_bounded_path(&mut self, path: &ast::Path,
bounds: &Option<OwnedSlice<ast::TyParamBound>>)
-> IoResult<()> {
self.print_path_(path, false, bounds)
}
pub fn print_pat(&mut self, pat: &ast::Pat) -> IoResult<()> {
try!(self.maybe_print_comment(pat.span.lo));
try!(self.ann.pre(self, NodePat(pat)));
/* Pat isn't normalized, but the beauty of it
is that it doesn't matter */
match pat.node {
ast::PatWild => try!(word(&mut self.s, "_")),
ast::PatWildMulti => try!(word(&mut self.s, "..")),
ast::PatIdent(binding_mode, ref path, sub) => {
match binding_mode {
ast::BindByRef(mutbl) => {
try!(self.word_nbsp("ref"));
try!(self.print_mutability(mutbl));
}
ast::BindByValue(ast::MutImmutable) => {}
ast::BindByValue(ast::MutMutable) => {
try!(self.word_nbsp("mut"));
}
}
try!(self.print_path(path, true));
match sub {
Some(p) => {
try!(word(&mut self.s, "@"));
try!(self.print_pat(p));
}
None => ()
}
}
ast::PatEnum(ref path, ref args_) => {
try!(self.print_path(path, true));
match *args_ {
None => try!(word(&mut self.s, "(..)")),
Some(ref args) => {
if !args.is_empty() {
try!(self.popen());
try!(self.commasep(Inconsistent, args.as_slice(),
|s, &p| s.print_pat(p)));
try!(self.pclose());
}
}
}
}
ast::PatStruct(ref path, ref fields, etc) => {
try!(self.print_path(path, true));
try!(word(&mut self.s, "{"));
try!(self.commasep_cmnt(
Consistent, fields.as_slice(),
|s, f| {
try!(s.cbox(indent_unit));
try!(s.print_ident(f.ident));
try!(s.word_space(":"));
try!(s.print_pat(f.pat));
s.end()
},
|f| f.pat.span));
if etc {
if fields.len() != 0u { try!(self.word_space(",")); }
try!(word(&mut self.s, ".."));
}
try!(word(&mut self.s, "}"));
}
ast::PatTup(ref elts) => {
try!(self.popen());
try!(self.commasep(Inconsistent,
elts.as_slice(),
|s, &p| s.print_pat(p)));
if elts.len() == 1 {
try!(word(&mut self.s, ","));
}
try!(self.pclose());
}
ast::PatUniq(inner) => {
try!(word(&mut self.s, "~"));
try!(self.print_pat(inner));
}
ast::PatRegion(inner) => {
try!(word(&mut self.s, "&"));
try!(self.print_pat(inner));
}
ast::PatLit(e) => try!(self.print_expr(e)),
ast::PatRange(begin, end) => {
try!(self.print_expr(begin));
try!(space(&mut self.s));
try!(word(&mut self.s, ".."));
try!(self.print_expr(end));
}
ast::PatVec(ref before, slice, ref after) => {
try!(word(&mut self.s, "["));
try!(self.commasep(Inconsistent,
before.as_slice(),
|s, &p| s.print_pat(p)));
for &p in slice.iter() {
if !before.is_empty() { try!(self.word_space(",")); }
match *p {
ast::Pat { node: ast::PatWildMulti, .. } => {
// this case is handled by print_pat
}
_ => try!(word(&mut self.s, "..")),
}
try!(self.print_pat(p));
if !after.is_empty() { try!(self.word_space(",")); }
}
try!(self.commasep(Inconsistent,
after.as_slice(),
|s, &p| s.print_pat(p)));
try!(word(&mut self.s, "]"));
}
}
self.ann.post(self, NodePat(pat))
}
// Returns whether it printed anything
fn print_explicit_self(&mut self,
explicit_self: ast::ExplicitSelf_,
mutbl: ast::Mutability) -> IoResult<bool> {
try!(self.print_mutability(mutbl));
match explicit_self {
ast::SelfStatic => { return Ok(false); }
ast::SelfValue => {
try!(word(&mut self.s, "self"));
}
ast::SelfUniq => {
try!(word(&mut self.s, "~self"));
}
ast::SelfRegion(ref lt, m) => {
try!(word(&mut self.s, "&"));
try!(self.print_opt_lifetime(lt));
try!(self.print_mutability(m));
try!(word(&mut self.s, "self"));
}
}
return Ok(true);
}
pub fn print_fn(&mut self,
decl: &ast::FnDecl,
fn_style: Option<ast::FnStyle>,
abi: abi::Abi,
name: ast::Ident,
generics: &ast::Generics,
opt_explicit_self: Option<ast::ExplicitSelf_>,
vis: ast::Visibility) -> IoResult<()> {
try!(self.head(""));
try!(self.print_fn_header_info(opt_explicit_self, fn_style, abi, vis));
try!(self.nbsp());
try!(self.print_ident(name));
try!(self.print_generics(generics));
self.print_fn_args_and_ret(decl, opt_explicit_self)
}
pub fn print_fn_args(&mut self, decl: &ast::FnDecl,
opt_explicit_self: Option<ast::ExplicitSelf_>)
-> IoResult<()> {
// It is unfortunate to duplicate the commasep logic, but we want the
// self type and the args all in the same box.
try!(self.rbox(0u, Inconsistent));
let mut first = true;
for &explicit_self in opt_explicit_self.iter() {
let m = match explicit_self {
ast::SelfStatic => ast::MutImmutable,
_ => match decl.inputs.get(0).pat.node {
ast::PatIdent(ast::BindByValue(m), _, _) => m,
_ => ast::MutImmutable
}
};
first = !try!(self.print_explicit_self(explicit_self, m));
}
// HACK(eddyb) ignore the separately printed self argument.
let args = if first {
decl.inputs.as_slice()
} else {
decl.inputs.slice_from(1)
};
for arg in args.iter() {
if first { first = false; } else { try!(self.word_space(",")); }
try!(self.print_arg(arg));
}
self.end()
}
pub fn print_fn_args_and_ret(&mut self, decl: &ast::FnDecl,
opt_explicit_self: Option<ast::ExplicitSelf_>)
-> IoResult<()> {
try!(self.popen());
try!(self.print_fn_args(decl, opt_explicit_self));
if decl.variadic {
try!(word(&mut self.s, ", ..."));
}
try!(self.pclose());
try!(self.maybe_print_comment(decl.output.span.lo));
match decl.output.node {
ast::TyNil => Ok(()),
_ => {
try!(self.space_if_not_bol());
try!(self.word_space("->"));
self.print_type(decl.output)
}
}
}
pub fn print_fn_block_args(&mut self,
decl: &ast::FnDecl) -> IoResult<()> {
try!(word(&mut self.s, "|"));
try!(self.print_fn_args(decl, None));
try!(word(&mut self.s, "|"));
match decl.output.node {
ast::TyInfer => {}
_ => {
try!(self.space_if_not_bol());
try!(self.word_space("->"));
try!(self.print_type(decl.output));
}
}
self.maybe_print_comment(decl.output.span.lo)
}
pub fn print_proc_args(&mut self, decl: &ast::FnDecl) -> IoResult<()> {
try!(word(&mut self.s, "proc"));
try!(word(&mut self.s, "("));
try!(self.print_fn_args(decl, None));
try!(word(&mut self.s, ")"));
match decl.output.node {
ast::TyInfer => {}
_ => {
try!(self.space_if_not_bol());
try!(self.word_space("->"));
try!(self.print_type(decl.output));
}
}
self.maybe_print_comment(decl.output.span.lo)
}
pub fn print_bounds(&mut self,
region: &Option<ast::Lifetime>,
bounds: &OwnedSlice<ast::TyParamBound>,
print_colon_anyway: bool) -> IoResult<()> {
if !bounds.is_empty() || region.is_some() {
try!(word(&mut self.s, ":"));
let mut first = true;
match *region {
Some(ref lt) => {
let token = token::get_name(lt.name);
if token.get() != "static" {
try!(self.nbsp());
first = false;
try!(self.print_lifetime(lt));
}
}
None => {}
}
for bound in bounds.iter() {
try!(self.nbsp());
if first {
first = false;
} else {
try!(self.word_space("+"));
}
try!(match *bound {
TraitTyParamBound(ref tref) => self.print_trait_ref(tref),
RegionTyParamBound => word(&mut self.s, "'static"),
})
}
Ok(())
} else if print_colon_anyway {
word(&mut self.s, ":")
} else {
Ok(())
}
}
pub fn print_lifetime(&mut self,
lifetime: &ast::Lifetime) -> IoResult<()> {
try!(word(&mut self.s, "'"));
self.print_name(lifetime.name)
}
pub fn print_generics(&mut self,
generics: &ast::Generics) -> IoResult<()> {
let total = generics.lifetimes.len() + generics.ty_params.len();
if total > 0 {
try!(word(&mut self.s, "<"));
let mut ints = Vec::new();
for i in range(0u, total) {
ints.push(i);
}
try!(self.commasep(
Inconsistent, ints.as_slice(),
|s, &idx| {
if idx < generics.lifetimes.len() {
let lifetime = generics.lifetimes.get(idx);
s.print_lifetime(lifetime)
} else {
let idx = idx - generics.lifetimes.len();
let param = generics.ty_params.get(idx);
if param.sized == ast::DynSize {
try!(s.word_space("type"));
}
try!(s.print_ident(param.ident));
try!(s.print_bounds(&None, ¶m.bounds, false));
match param.default {
Some(default) => {
try!(space(&mut s.s));
try!(s.word_space("="));
s.print_type(default)
}
_ => Ok(())
}
}
}));
word(&mut self.s, ">")
} else {
Ok(())
}
}
pub fn print_meta_item(&mut self, item: &ast::MetaItem) -> IoResult<()> {
try!(self.ibox(indent_unit));
match item.node {
ast::MetaWord(ref name) => {
try!(word(&mut self.s, name.get()));
}
ast::MetaNameValue(ref name, ref value) => {
try!(self.word_space(name.get()));
try!(self.word_space("="));
try!(self.print_literal(value));
}
ast::MetaList(ref name, ref items) => {
try!(word(&mut self.s, name.get()));
try!(self.popen());
try!(self.commasep(Consistent,
items.as_slice(),
|s, &i| s.print_meta_item(i)));
try!(self.pclose());
}
}
self.end()
}
pub fn print_view_path(&mut self, vp: &ast::ViewPath) -> IoResult<()> {
match vp.node {
ast::ViewPathSimple(ident, ref path, _) => {
// FIXME(#6993) can't compare identifiers directly here
if path.segments.last().unwrap().identifier.name != ident.name {
try!(self.print_ident(ident));
try!(space(&mut self.s));
try!(self.word_space("="));
}
self.print_path(path, false)
}
ast::ViewPathGlob(ref path, _) => {
try!(self.print_path(path, false));
word(&mut self.s, "::*")
}
ast::ViewPathList(ref path, ref idents, _) => {
if path.segments.is_empty() {
try!(word(&mut self.s, "{"));
} else {
try!(self.print_path(path, false));
try!(word(&mut self.s, "::{"));
}
try!(self.commasep(Inconsistent, idents.as_slice(), |s, w| {
s.print_ident(w.node.name)
}));
word(&mut self.s, "}")
}
}
}
pub fn print_view_paths(&mut self,
vps: &[@ast::ViewPath]) -> IoResult<()> {
self.commasep(Inconsistent, vps, |s, &vp| s.print_view_path(vp))
}
pub fn print_view_item(&mut self, item: &ast::ViewItem) -> IoResult<()> {
try!(self.hardbreak_if_not_bol());
try!(self.maybe_print_comment(item.span.lo));
try!(self.print_outer_attributes(item.attrs.as_slice()));
try!(self.print_visibility(item.vis));
match item.node {
ast::ViewItemExternCrate(id, ref optional_path, _) => {
try!(self.head("extern crate"));
try!(self.print_ident(id));
for &(ref p, style) in optional_path.iter() {
try!(space(&mut self.s));
try!(word(&mut self.s, "="));
try!(space(&mut self.s));
try!(self.print_string(p.get(), style));
}
}
ast::ViewItemUse(ref vps) => {
try!(self.head("use"));
try!(self.print_view_paths(vps.as_slice()));
}
}
try!(word(&mut self.s, ";"));
try!(self.end()); // end inner head-block
self.end() // end outer head-block
}
pub fn print_mutability(&mut self,
mutbl: ast::Mutability) -> IoResult<()> {
match mutbl {
ast::MutMutable => self.word_nbsp("mut"),
ast::MutImmutable => Ok(()),
}
}
pub fn print_mt(&mut self, mt: &ast::MutTy) -> IoResult<()> {
try!(self.print_mutability(mt.mutbl));
self.print_type(mt.ty)
}
pub fn print_arg(&mut self, input: &ast::Arg) -> IoResult<()> {
try!(self.ibox(indent_unit));
match input.ty.node {
ast::TyInfer => try!(self.print_pat(input.pat)),
_ => {
match input.pat.node {
ast::PatIdent(_, ref path, _) if
path.segments.len() == 1 &&
path.segments.get(0).identifier.name ==
parse::token::special_idents::invalid.name => {
// Do nothing.
}
_ => {
try!(self.print_pat(input.pat));
try!(word(&mut self.s, ":"));
try!(space(&mut self.s));
}
}
try!(self.print_type(input.ty));
}
}
self.end()
}
pub fn print_ty_fn(&mut self,
opt_abi: Option<abi::Abi>,
opt_sigil: Option<char>,
opt_region: &Option<ast::Lifetime>,
fn_style: ast::FnStyle,
onceness: ast::Onceness,
decl: &ast::FnDecl,
id: Option<ast::Ident>,
opt_bounds: &Option<OwnedSlice<ast::TyParamBound>>,
generics: Option<&ast::Generics>,
opt_explicit_self: Option<ast::ExplicitSelf_>)
-> IoResult<()> {
try!(self.ibox(indent_unit));
// Duplicates the logic in `print_fn_header_info()`. This is because that
// function prints the sigil in the wrong place. That should be fixed.
if opt_sigil == Some('~') && onceness == ast::Once {
try!(word(&mut self.s, "proc"));
} else if opt_sigil == Some('&') {
try!(self.print_extern_opt_abi(opt_abi));
try!(self.print_fn_style(fn_style));
try!(self.print_onceness(onceness));
} else {
assert!(opt_sigil.is_none());
try!(self.print_opt_abi_and_extern_if_nondefault(opt_abi));
try!(self.print_fn_style(fn_style));
try!(self.print_onceness(onceness));
try!(word(&mut self.s, "fn"));
}
match id {
Some(id) => {
try!(word(&mut self.s, " "));
try!(self.print_ident(id));
}
_ => ()
}
match generics { Some(g) => try!(self.print_generics(g)), _ => () }
try!(zerobreak(&mut self.s));
if opt_sigil == Some('&') {
try!(word(&mut self.s, "|"));
} else {
try!(self.popen());
}
try!(self.print_fn_args(decl, opt_explicit_self));
if opt_sigil == Some('&') {
try!(word(&mut self.s, "|"));
} else {
if decl.variadic {
try!(word(&mut self.s, ", ..."));
}
try!(self.pclose());
}
opt_bounds.as_ref().map(|bounds| {
self.print_bounds(opt_region, bounds, true)
});
try!(self.maybe_print_comment(decl.output.span.lo));
match decl.output.node {
ast::TyNil => {}
_ => {
try!(self.space_if_not_bol());
try!(self.ibox(indent_unit));
try!(self.word_space("->"));
if decl.cf == ast::NoReturn {
try!(self.word_nbsp("!"));
} else {
try!(self.print_type(decl.output));
}
try!(self.end());
}
}
self.end()
}
pub fn maybe_print_trailing_comment(&mut self, span: codemap::Span,
next_pos: Option<BytePos>)
-> IoResult<()> {
let cm = match self.cm {
Some(cm) => cm,
_ => return Ok(())
};
match self.next_comment() {
Some(ref cmnt) => {
if (*cmnt).style != comments::Trailing { return Ok(()) }
let span_line = cm.lookup_char_pos(span.hi);
let comment_line = cm.lookup_char_pos((*cmnt).pos);
let mut next = (*cmnt).pos + BytePos(1);
match next_pos { None => (), Some(p) => next = p }
if span.hi < (*cmnt).pos && (*cmnt).pos < next &&
span_line.line == comment_line.line {
try!(self.print_comment(cmnt));
self.cur_cmnt_and_lit.cur_cmnt += 1u;
}
}
_ => ()
}
Ok(())
}
pub fn print_remaining_comments(&mut self) -> IoResult<()> {
// If there aren't any remaining comments, then we need to manually
// make sure there is a line break at the end.
if self.next_comment().is_none() {
try!(hardbreak(&mut self.s));
}
loop {
match self.next_comment() {
Some(ref cmnt) => {
try!(self.print_comment(cmnt));
self.cur_cmnt_and_lit.cur_cmnt += 1u;
}
_ => break
}
}
Ok(())
}
pub fn print_literal(&mut self, lit: &ast::Lit) -> IoResult<()> {
try!(self.maybe_print_comment(lit.span.lo));
match self.next_lit(lit.span.lo) {
Some(ref ltrl) => {
return word(&mut self.s, (*ltrl).lit);
}
_ => ()
}
match lit.node {
ast::LitStr(ref st, style) => self.print_string(st.get(), style),
ast::LitChar(ch) => {
let mut res = StrBuf::from_str("'");
char::from_u32(ch).unwrap().escape_default(|c| res.push_char(c));
res.push_char('\'');
word(&mut self.s, res.into_owned())
}
ast::LitInt(i, t) => {
word(&mut self.s, ast_util::int_ty_to_str(t, Some(i)))
}
ast::LitUint(u, t) => {
word(&mut self.s, ast_util::uint_ty_to_str(t, Some(u)))
}
ast::LitIntUnsuffixed(i) => {
word(&mut self.s, format!("{}", i))
}
ast::LitFloat(ref f, t) => {
word(&mut self.s, f.get() + ast_util::float_ty_to_str(t))
}
ast::LitFloatUnsuffixed(ref f) => word(&mut self.s, f.get()),
ast::LitNil => word(&mut self.s, "()"),
ast::LitBool(val) => {
if val { word(&mut self.s, "true") } else { word(&mut self.s, "false") }
}
ast::LitBinary(ref arr) => {
try!(self.ibox(indent_unit));
try!(word(&mut self.s, "["));
try!(self.commasep_cmnt(Inconsistent, arr.as_slice(),
|s, u| word(&mut s.s, format!("{}", *u)),
|_| lit.span));
try!(word(&mut self.s, "]"));
self.end()
}
}
}
pub fn next_lit(&mut self, pos: BytePos) -> Option<comments::Literal> {
match self.literals {
Some(ref lits) => {
while self.cur_cmnt_and_lit.cur_lit < lits.len() {
let ltrl = (*(*lits).get(self.cur_cmnt_and_lit.cur_lit)).clone();
if ltrl.pos > pos { return None; }
self.cur_cmnt_and_lit.cur_lit += 1u;
if ltrl.pos == pos { return Some(ltrl); }
}
None
}
_ => None
}
}
pub fn maybe_print_comment(&mut self, pos: BytePos) -> IoResult<()> {
loop {
match self.next_comment() {
Some(ref cmnt) => {
if (*cmnt).pos < pos {
try!(self.print_comment(cmnt));
self.cur_cmnt_and_lit.cur_cmnt += 1u;
} else { break; }
}
_ => break
}
}
Ok(())
}
pub fn print_comment(&mut self,
cmnt: &comments::Comment) -> IoResult<()> {
match cmnt.style {
comments::Mixed => {
assert_eq!(cmnt.lines.len(), 1u);
try!(zerobreak(&mut self.s));
try!(word(&mut self.s, *cmnt.lines.get(0)));
zerobreak(&mut self.s)
}
comments::Isolated => {
try!(self.hardbreak_if_not_bol());
for line in cmnt.lines.iter() {
// Don't print empty lines because they will end up as trailing
// whitespace
if !line.is_empty() {
try!(word(&mut self.s, *line));
}
try!(hardbreak(&mut self.s));
}
Ok(())
}
comments::Trailing => {
try!(word(&mut self.s, " "));
if cmnt.lines.len() == 1u {
try!(word(&mut self.s, *cmnt.lines.get(0)));
hardbreak(&mut self.s)
} else {
try!(self.ibox(0u));
for line in cmnt.lines.iter() {
if !line.is_empty() {
try!(word(&mut self.s, *line));
}
try!(hardbreak(&mut self.s));
}
self.end()
}
}
comments::BlankLine => {
// We need to do at least one, possibly two hardbreaks.
let is_semi = match self.s.last_token() {
pp::String(s, _) => ";" == s,
_ => false
};
if is_semi || self.is_begin() || self.is_end() {
try!(hardbreak(&mut self.s));
}
hardbreak(&mut self.s)
}
}
}
pub fn print_string(&mut self, st: &str,
style: ast::StrStyle) -> IoResult<()> {
let st = match style {
ast::CookedStr => format!("\"{}\"", st.escape_default()),
ast::RawStr(n) => format!("r{delim}\"{string}\"{delim}",
delim="#".repeat(n), string=st)
};
word(&mut self.s, st)
}
pub fn next_comment(&mut self) -> Option<comments::Comment> {
match self.comments {
Some(ref cmnts) => {
if self.cur_cmnt_and_lit.cur_cmnt < cmnts.len() {
Some((*cmnts.get(self.cur_cmnt_and_lit.cur_cmnt)).clone())
} else {
None
}
}
_ => None
}
}
pub fn print_opt_fn_style(&mut self,
opt_fn_style: Option<ast::FnStyle>) -> IoResult<()> {
match opt_fn_style {
Some(fn_style) => self.print_fn_style(fn_style),
None => Ok(())
}
}
pub fn print_opt_abi_and_extern_if_nondefault(&mut self,
opt_abi: Option<abi::Abi>)
-> IoResult<()> {
match opt_abi {
Some(abi::Rust) => Ok(()),
Some(abi) => {
try!(self.word_nbsp("extern"));
self.word_nbsp(abi.to_str())
}
None => Ok(())
}
}
pub fn print_extern_opt_abi(&mut self,
opt_abi: Option<abi::Abi>) -> IoResult<()> {
match opt_abi {
Some(abi) => {
try!(self.word_nbsp("extern"));
self.word_nbsp(abi.to_str())
}
None => Ok(())
}
}
pub fn print_fn_header_info(&mut self,
_opt_explicit_self: Option<ast::ExplicitSelf_>,
opt_fn_style: Option<ast::FnStyle>,
abi: abi::Abi,
vis: ast::Visibility) -> IoResult<()> {
try!(word(&mut self.s, visibility_qualified(vis, "")));
if abi != abi::Rust {
try!(self.word_nbsp("extern"));
try!(self.word_nbsp(abi.to_str()));
if opt_fn_style != Some(ast::ExternFn) {
try!(self.print_opt_fn_style(opt_fn_style));
}
} else {
try!(self.print_opt_fn_style(opt_fn_style));
}
word(&mut self.s, "fn")
}
pub fn print_fn_style(&mut self, s: ast::FnStyle) -> IoResult<()> {
match s {
ast::NormalFn => Ok(()),
ast::UnsafeFn => self.word_nbsp("unsafe"),
ast::ExternFn => self.word_nbsp("extern")
}
}
pub fn print_onceness(&mut self, o: ast::Onceness) -> IoResult<()> {
match o {
ast::Once => self.word_nbsp("once"),
ast::Many => Ok(())
}
}
}
#[cfg(test)]
mod test {
use super::*;
use ast;
use ast_util;
use codemap;
use parse::token;
#[test]
fn test_fun_to_str() {
let abba_ident = token::str_to_ident("abba");
let decl = ast::FnDecl {
inputs: Vec::new(),
output: ast::P(ast::Ty {id: 0,
node: ast::TyNil,
span: codemap::DUMMY_SP}),
cf: ast::Return,
variadic: false
};
let generics = ast_util::empty_generics();
assert_eq!(&fun_to_str(&decl, ast::NormalFn, abba_ident,
None, &generics),
&"fn abba()".to_owned());
}
#[test]
fn test_variant_to_str() {
let ident = token::str_to_ident("principal_skinner");
let var = codemap::respan(codemap::DUMMY_SP, ast::Variant_ {
name: ident,
attrs: Vec::new(),
// making this up as I go.... ?
kind: ast::TupleVariantKind(Vec::new()),
id: 0,
disr_expr: None,
vis: ast::Public,
});
let varstr = variant_to_str(&var);
assert_eq!(&varstr,&"pub principal_skinner".to_owned());
}
} | }
pub fn bclose_maybe_open (&mut self, span: codemap::Span, |
test-question-editor.component.ts | import { Component, OnInit } from '@angular/core';
import { SubjectsService } from 'src/app/services/contentServices/subjects.service';
import { TestQuestionsService } from 'src/app/services/contentServices/test-questions.service';
import { SignInService } from 'src/app/services/sign-in.service';
import { ActivatedRoute, Router } from '@angular/router';
import { environment } from 'src/environments/environment';
import { TestQuestion } from 'src/app/classes/TestQuestion';
import { NavigationServiceService } from 'src/app/services/navigation-service.service';
@Component({
selector: 'app-test-question-editor',
templateUrl: './test-question-editor.component.html',
styleUrls: ['./test-question-editor.component.css']
})
export class | implements OnInit {
  // Ids of parent objects and the question being edited.
public subjectid = null;
public topicid = null;
public testid = null;
public question$ = null;
public submitted: boolean = false; // Whether page has been submitted.
public errorMessage: string = null; // Error message if something goes wrong.
// Value of question / answer / imageUrl inputs.
public questionValue: string = '';
public answerValue: string = '';
public imageUrlValue: string = '';
public imageUrlValid: boolean = true;
constructor(
private subjectService: SubjectsService,
private testQuestionService: TestQuestionsService,
private signIn: SignInService,
private route: ActivatedRoute,
private router: Router,
public navService: NavigationServiceService
) { }
ngOnInit() {
// Get route params.
this.subjectid = this.route.snapshot.paramMap.get(environment.routeParams.subjectid);
this.topicid = this.route.snapshot.paramMap.get(environment.routeParams.topicid);
this.testid = this.route.snapshot.paramMap.get(environment.routeParams.testid);
let questionid = this.route.snapshot.paramMap.get(environment.routeParams.questionId);
// Set subject.
this.subjectService.setSubject(this.subjectid);
// Get user admin status.
this.signIn.userIsAdmin().subscribe((isAdmin) => {
// Redirect to test home if not admin.
if (!isAdmin) {
this.redirectToTestHome();
}
}, (err) => {
console.error('TestQuestion-Editor isAdmin Error:', err);
});
    // Get the question being edited.
this.testQuestionService.getTestQuestion(this.subjectid, this.topicid, this.testid, questionid)
.subscribe((questions: TestQuestion[]) => {
if (questions !== null && questions.length > 0) {
this.question$ = questions[0];
this.setPageValues(this.question$);
}
}, (err) => {
console.error('TestQuestion-Editor question$ Error:', err);
});
// Watch values for question / answer / imageUrl.
document.getElementById('questionText').addEventListener('input', (e) => {
this.questionValue = (<HTMLInputElement>e.target).value.trim();
});
document.getElementById('questionAnswer').addEventListener('input', (e) => {
this.answerValue = (<HTMLInputElement>e.target).value.trim();
});
document.getElementById('questionImageUrl').addEventListener('input', (e) => {
this.imageUrlValue = (<HTMLInputElement>e.target).value;
// Check if current value is valid.
if (this.imageUrlValue !== '') {
let img = new Image();
img.src = this.imageUrlValue;
img.onload = () => { this.imageUrlValid = true; }
img.onerror = () => { this.imageUrlValid = false; }
} else {
// If blank, set to valid.
this.imageUrlValid = true;
}
});
}
/**
* Sets values of input fields based on given object.
* @param question - object containing current values.
*/
private setPageValues(question): void {
// Text
let text = <HTMLInputElement>document.getElementById('questionText');
if (text !== null) { text.value = question.question; }
this.questionValue = question.question;
// Answer
let answer = <HTMLInputElement>document.getElementById('questionAnswer');
if (answer !== null) { answer.value = question.answer; }
this.answerValue = question.answer;
// Image url.
let imageUrl = <HTMLInputElement>document.getElementById('questionImageUrl');
if (imageUrl !== null) { imageUrl.value = question.imageUrl; }
this.imageUrlValue = question.imageUrl;
}
/**
* Resets inputs to their initial values.
*/
public resetValues(): void {
if (this.question$ !== null) {
this.setPageValues(this.question$);
}
}
/**
* Validates inputs and updates question on api if valid.
*/
public editTestQuestion(): void {
// Get and validate question.
let question = this.buildTestQuestion();
if (question == null) { return; }
if (Object.keys(question).length == 0) {
this.errorMessage = 'You have not changed any values.';
return;
}
// Submit if allowed.
if (!this.submitted) {
this.submitted = true;
this.testQuestionService.editTestQuestion(
this.subjectid, this.topicid, this.testid, this.question$.id, question).subscribe(
this.handleSuccess,
this.handleFailure
);
}
}
/**
* Builds question object containing values for updating.
* Returns object if successful.
* May return object with no attributes if no values changed.
* Else returns null.
*/
private buildTestQuestion(): object {
let question = {};
// Text
let text = <HTMLInputElement>document.getElementById('questionText');
if (text == null) { return null; }
if (text.value.trim() == '') {
this.errorMessage = 'You must enter text for the question.';
return null;
}
if (text.value.trim() !== this.question$.question) {
question['question'] = text.value.trim();
}
// Answer
let answer = <HTMLInputElement>document.getElementById('questionAnswer');
if (answer == null) { return null; }
if (answer.value.trim() == '') {
this.errorMessage = 'You must enter an answer for the question.';
return null;
}
if (answer.value.trim() !== this.question$.answer) {
question['answer'] = answer.value.trim();
}
// Image url
let imageUrl = <HTMLInputElement>document.getElementById('questionImageUrl');
if (imageUrl == null) { return null; }
if (imageUrl.value.trim() !== this.question$.imageUrl) {
question['imageUrl'] = imageUrl.value.trim();
}
this.errorMessage = null;
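    // Illustrative: if only the question text changed, `question` now holds
    // e.g. { question: 'New question text?' }; an empty object means nothing changed.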
return question;
}
// Handlers for main api call (edit test question)
private handleSuccess = (res) => {
let route = environment.routes.testQuestionHome;
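    // Illustrative: a route template such as '.../:subjectid/.../:questionId'
    // has each ':param' token replaced with the corresponding id below.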
route = route.replace(`:${environment.routeParams.subjectid}`, this.subjectid);
route = route.replace(`:${environment.routeParams.topicid}`, this.topicid);
route = route.replace(`:${environment.routeParams.testid}`, this.testid);
route = route.replace(`:${environment.routeParams.questionId}`, res[0].id);
this.router.navigate([ route ]);
}
private handleFailure = (err) => {
switch (err.status) {
case 400: // User inputted something wrong.
this.errorMessage = err.error.message;
this.submitted = false;
break;
case 401: // User not admin.
this.redirectToTestHome();
break;
case 500: // Something went wrong with server.
this.errorMessage = 'Sorry, something went wrong with the server. Please try again later.';
break;
default: // Unknown error.
console.error('TestQuestion-Editor unknown error:', err);
break;
}
}
/**
   * Redirects user to test home.
*/
private redirectToTestHome() {
this.router.navigate([
this.navService.getTestHomeRoute(this.subjectid, this.topicid, this.testid)
]);
}
}
| TestQuestionEditorComponent |
language.py | from .tokenizer import Tokenizer
from .vocab import Vocab
from .doc import Doc
from .pointers.doc_pointer import DocPointer
from .pipeline import SubPipeline
from syft.generic.object import AbstractObject
from syft.workers.base import BaseWorker
from syft.generic.string import String
from syft.generic.pointers.string_pointer import StringPointer
from syft.generic.pointers.object_pointer import ObjectPointer
from typing import List, Union, Tuple
class BaseDefaults(object):
"""A class that defines all the defaults of the Language class
"""
@classmethod
def create_vocab(cls, model_name) -> Vocab:
"""
        Creates the Vocab object that holds the vocabulary along with vocabulary metadata.
        Todo:
            I started with a very simple Vocab class that contains only a dict
            variable called 'vectors' to hold word vectors, i.e.
            vocab.vectors['word'] = float. To be reviewed for more complex functionality.
"""
# Instantiate the Vocab object
vocab = Vocab(model_name)
return vocab
@classmethod
def create_tokenizer(cls, vocab,) -> Tokenizer:
"""Creates a Tokenizer object that will be used to create the Doc object, which is the
main container for annotated tokens.
"""
# Instantiate the Tokenizer object and return it
tokenizer = Tokenizer(vocab,)
return tokenizer
class Language(AbstractObject):
"""Inspired by spaCy Language class.
    Orchestrates the interactions between the different components of the pipeline
    to accomplish core text-processing tasks.
    It creates the Doc object, which is the container into which all text-processing
    pipeline components feed their results.
"""
def __init__(
self,
model_name,
id: int = None,
owner: BaseWorker = None,
tags: List[str] = None,
description: str = None,
):
# Define the default settings
self.Defaults = BaseDefaults
# Create the vocabulary
self.vocab = self.Defaults.create_vocab(model_name)
# Create a dictionary that associates to the name of each text-processing component
# of the pipeline, an object that is charged to accomplish the job.
self.factories = {"tokenizer": self.Defaults.create_tokenizer(self.vocab)}
# Initialize the subpipeline template
# It only contains the tokenizer at initialization
self.pipeline_template = [{"remote": True, "name": "tokenizer"}]
        # Initialize the main pipeline
self._reset_pipeline()
super(Language, self).__init__(id=id, owner=owner, tags=tags, description=description)
@property
def pipe_names(self) -> List[str]:
"""Returns a list of component names in the pipeline in order of execution.
Returns:
(list): List of all pipeline component name in order of execution.
"""
return [pipe_template["name"] for pipe_template in self.pipeline_template]
def _parse_pipeline_template(self):
"""Parses the `pipeline_template` property to
create the `subpipeline_templates` property.
"""
# Initialize a subpipeline template with the
        # tokenizer. The tokenizer always has 'remote' set
# to True.
subpipeline_template = dict(
remote=self.pipeline_template[0]["remote"], names=[self.pipeline_template[0]["name"]],
)
# Initialize the subpipeline templates list as a class property
self.subpipeline_templates = [subpipeline_template]
# Loop through the pipeline template elements
for pipe_template in self.pipeline_template[1:]:
# compare `remote` properties between templates:
# If the pipe template has the same `remote` value,
# it is appended to the existing subpipeline template
if pipe_template["remote"] == subpipeline_template["remote"]:
subpipeline_template["names"].append(pipe_template["name"])
# Otherwise, create a new subpipeline template and add the
# pipe template to it
else:
subpipeline_template = dict(
remote=pipe_template["remote"], names=[pipe_template["name"]]
)
self.subpipeline_templates.append(subpipeline_template)
def _reset_pipeline(self):
"""Reset the `pipeline` class property.
"""
# Read the pipeline components from the template and aggregate them into
        # a list of subpipeline templates.
# This method will create the instance variable
# self.subpipeline_templates
self._parse_pipeline_template()
# Get the number of subpipelines
subpipeline_count = len(self.subpipeline_templates)
# Initialize a new empty pipeline with as many
# empty dicts as there are subpipelines
self.pipeline = [dict() for i in range(subpipeline_count)]
def add_pipe(
self,
component: callable,
remote: bool = False,
name: str = None,
before: str = None,
after: str = None,
first: bool = False,
last: bool = True,
):
"""Adds a pipe template to a subpipeline tempaltes.
A pipe template is a dict of the form `{'remote': remote, 'name': name}`.
Few main steps are carried out here:
1- The new pipe name is added at the right position in the pipeline template.
Here is an example of how pipeline template list looks like
self.pipeline_template = [{'remote': True, 'name': 'tokenizer'},
{'remote': True, 'name': <pipe_1_name>},
{'remote': True, 'name': <pipe_2_name>},
{'remote': False, 'name': <pipe_3_name>},
{'remote': False, 'name': <pipe_4_name>}]
        2- The pipeline template is parsed into a list of subpipeline templates.
Each subpipeline template is an aggregation of adjacent pipes with
the same value for 'remote'
Here is an example of how the subpipeline template list for the above
pipeline template would look like:
self.subpipeline_templates = [{'remote': True, 'names': ['tokenizer',
'pipe_1_name',
'pipe_2_name']},
                                          {'remote': False, 'names': ['pipe_3_name',
'pipe_4_name']}
]
        3- The pipeline is initialized by creating a list with as many empty dicts as
there are subpipelines:
self.pipeline = [dict(), dict()]
Args:
component (callable): This is a callable that takes a Doc object and modifies
it inplace.
name (str): The name of the pipeline component to be added. Defaults to None.
remote (bool): If True, the pipe component will be sent to the remote worker
where the Doc object resides. If False, the pipe will operate locally,
either on a Doc object directly, or on a DocPointer returned by the previous
component in the pipeline. Defaults to False.
before (str): The name of the pipeline component before which the new component
is to be added. Defaults to None.
after (str): The name of the pipeline component after which the new component
is to be added. Defaults to None.
            first (bool): if set to True, the new pipeline component will be added as the
                first element of the pipeline (after the tokenizer). Defaults to False.
            last (bool): if set to True, the new pipeline component will be added as the
                last element of the pipeline. Defaults to True.
"""
# The component argument must be callable
# [TODO] An exception with a custom error message should be thrown
assert hasattr(component, "__call__"), "Argument `component` is not a callable."
# Make sure the `component` argument is an object that has a `factory()` method
assert hasattr(
component, "factory"
), "Argument `component` should be an object that has a `factory()` method"
# [TODO] The following requirement should be relaxed and a name should be
# automatically assigned in case `name` is None. This would be convenient
# as done by spaCy
assert (
isinstance(name, str) and len(name) >= 1
), "Argument `name` should be of type `str` with at least one character."
# [TODO] Add custom error message
assert (
name not in self.pipe_names
), "Pipeline component name '{}' that you have chosen is already used by another pipeline component.".format(
name
)
# Make sure only one of 'before', 'after', 'first' or 'last' is set
# [TODO] Add custom error message
assert (
sum([bool(before), bool(after), bool(first), bool(last)]) < 2
), "Only one among arguments 'before', 'after', 'first' or 'last' should be set."
# Add the new pipe component to the list of factories
self.factories[name] = component
# Create the pipe template that will be added the pipeline
# template
pipe_template = dict(remote=remote, name=name)
# Add the pipe template at the right position
if last or not any([before, after, first]):
self.pipeline_template.append(pipe_template)
elif first:
# The index 0 is reserved for the tokenizer
            # Note: list.insert takes positional arguments only
            self.pipeline_template.insert(1, pipe_template)
elif before in self.pipe_names:
            self.pipeline_template.insert(self.pipe_names.index(before), pipe_template)
elif after in self.pipe_names:
            self.pipeline_template.insert(self.pipe_names.index(after) + 1, pipe_template)
else:
# [TODO] Raise exception with custom error message
            assert False, (
                "Component cannot be added to the pipeline, please double check "
                "argument values of the `add_pipe` method call."
            )
# Reset the pipeline.
# The instance variable that will be affected is:
# self.pipeline
self._reset_pipeline()
def | (self, name: str) -> Tuple[str, callable]:
"""Removes the pipeline whose name is 'name'
Args:
name (str): The name of the pipeline component to remove.
Returns:
The removed pipe
"""
# [TODO] Add custom error message
assert (
name in self.pipe_names
), "No pipeline component with the specified name '{}' was found".format(name)
        # Get the index of the pipe template to be removed in the
        # self.pipeline_template list
pipe_index = self.pipe_names.index(name)
# Delete the pipe using its index
pipe = self.pipeline_template.pop(pipe_index)
# Parse the pipeline template again
# to create the subpipeline templates
self._parse_pipeline_template()
# Reset the pipeline.
self._reset_pipeline()
return pipe
def _run_subpipeline_from_template(
        self, template_index: int, input: Union[str, String, StringPointer, Doc, DocPointer],
) -> Union[Doc, DocPointer]:
"""Runs the subpipeline at position `template_index` of
self.pipeline on the appropriate worker.
The worker on which the subpipeline is run is either the
the same worker on which `input` lives, if the `remote`
property of the subpipeline template is True. Or, it is the
local worker if `remote` is False.
If no subpipeline is yet created for the specified worker,
one is created using the template, and added to the pipeline.
Args:
template_index (int): The index of the subpipeline
template in `self.subpipelines_templates`
input (str, String, StringPointer, Doc, DocPointer):
The input on which the subpipeline operates.
It can be either the text to tokenize (or a pointer
to it) for the subpipeline at index 0, or it could
be the Doc (or its pointer) for all subsequent
subpipelines.
Returns:
(Doc or DocPointer): The new or updated Doc object or
a pointer to a Doc object.
"""
# Get the location ID of the worker where the text to be tokenized,
# or the Doc to be processed is located
if isinstance(input, ObjectPointer):
location_id = input.location.id
else:
location_id = self.owner.id
# Create a new SubPipeline object if one doesn't already exist on the
# worker where the input is located
if location_id not in self.pipeline[template_index]:
# Get the subpipeline template
subpipeline_template = self.subpipeline_templates[template_index]
# Is the pipeline a remote one?
remote = subpipeline_template["remote"]
# Instantiate a subpipeline and load the subpipeline template
subpipeline = SubPipeline()
subpipeline.load_template(template=subpipeline_template, factories=self.factories)
# Add the subpipeline to the pipeline
self.pipeline[template_index][location_id] = subpipeline
# Send the subpipeline to the worker where the input is located
if (
isinstance(input, ObjectPointer)
and input.location != self.owner # Is the input remote?
                and remote  # Is the subpipeline sendable?
):
self.pipeline[template_index][location_id] = self.pipeline[template_index][
location_id
].send(input.location)
# Apply the subpipeline and get the doc or the Doc id.
# If a Doc ID is obtained, this signifies the ID of the
# Doc object on the remote worker.
doc_or_id = self.pipeline[template_index][location_id](input)
# If the doc is of type str or int, this means that a
# DocPointer should be created
        if isinstance(doc_or_id, (int, str)):
doc = DocPointer(location=input.location, id_at_location=doc_or_id, owner=self.owner)
# This is of type Doc then
else:
doc = doc_or_id
# return the doc
return doc
def __call__(self, text: Union[str, String, StringPointer]) -> Union[Doc, DocPointer]:
"""The text is tokenized and pipeline components are called
here, and the Doc object is returned.
Args:
text (str, String or StringPointer): the text to be tokenized and
processed by the pipeline components.
Returns:
(Doc or DocPointer): The Doc object or a pointer to a Doc object.
This object provides access to all token data.
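        Example (illustrative sketch; assumes an ``nlp = Language('my_model')``
        instance and that `text` may live on a remote worker):
            doc = nlp(text)  # a Doc if `text` is local, a DocPointer if remote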
"""
# Runs the first subpipeline.
# The first subpipeline is the one that has the tokenizer
doc = self._run_subpipeline_from_template(template_index=0, input=text)
        # Apply the rest of the subpipelines sequentially
# Each subpipeline will modify the document `doc` inplace
for i, subpipeline in enumerate(self.pipeline[1:], start=1):
doc = self._run_subpipeline_from_template(template_index=i, input=doc)
# return the Doc object
return doc
| remove_pipe |
process_windows.go | package main
// https://gist.github.com/hallazzang/76f3970bfc949831808bbebc8ca15209#gistcomment-2948162
import (
"fmt"
"os"
"os/exec"
"unsafe"
"golang.org/x/sys/windows"
)
func RunProcess(name string, wait bool, args ...string) error {
cmd := exec.Command(name, args...)
cmd.Stdin = os.Stdin
cmd.Stdout = os.Stdout
cmd.Stderr = os.Stderr
group, err := newProcessGroup()
if err != nil {
return fmt.Errorf("error creating process group: %v", err)
}
defer group.Dispose()
if err := cmd.Start(); err != nil {
return fmt.Errorf("error starting process: %v", err)
}
if wait {
group.AddProcess(cmd.Process)
if err := cmd.Wait(); err != nil {
return fmt.Errorf("error waiting for the process: %v", err)
}
}
return nil
}
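// Usage sketch (hypothetical arguments): run a child process and wait for it,
// relying on the job object so descendants are killed if the handle closes:
//
//	if err := RunProcess("ping", true, "-n", "4", "localhost"); err != nil {
//		// handle the error
//	}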
// We use this struct to retrieve the process handle (which is unexported)
// from os.Process using an unsafe operation.
type process struct {
Pid int
Handle uintptr
}
type processGroup windows.Handle
func | () (processGroup, error) {
handle, err := windows.CreateJobObject(nil, nil)
if err != nil {
return 0, err
}
info := windows.JOBOBJECT_EXTENDED_LIMIT_INFORMATION{
BasicLimitInformation: windows.JOBOBJECT_BASIC_LIMIT_INFORMATION{
LimitFlags: windows.JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE,
},
}
if _, err := windows.SetInformationJobObject(
handle,
windows.JobObjectExtendedLimitInformation,
uintptr(unsafe.Pointer(&info)),
uint32(unsafe.Sizeof(info))); err != nil {
return 0, err
}
return processGroup(handle), nil
}
func (g processGroup) Dispose() error {
return windows.CloseHandle(windows.Handle(g))
}
func (g processGroup) AddProcess(p *os.Process) error {
return windows.AssignProcessToJobObject(windows.Handle(g), windows.Handle((*process)(unsafe.Pointer(p)).Handle))
}
| newProcessGroup |
github_utils.py | import os
import re
from collections import OrderedDict
from typing import Generator, List, Optional, Reversible
import delegator
from git import Commit
from git.exc import InvalidGitRepositoryError
from git.repo import Repo
from github import Github
from github.Label import Label
from github.Issue import Issue
from github.GithubException import UnknownObjectException
from github.Repository import Repository
from cherrytree.classes import CherryTreeExecutionException
# PRs are either of form "Merge pull request #nnn from..." or "...(#nnn)"
PR_REGEX = re.compile(r"(^Merge pull request #(\d+) from|\(#(\d+)\)$)")
def get_github_instance() -> Github:
token = os.environ.get("GITHUB_TOKEN")
if not token:
raise Exception("Env var 'GITHUB_TOKEN' is missing")
return Github(token)
def get_repo(repo: str) -> Repository:
g = get_github_instance()
return g.get_repo(repo)
def get_issues_from_labels(repo: str, label: str, prs_only: bool = False) -> List[Issue]:
label_objects: List[Label] = []
gh_repo = get_repo(repo)
try:
label_objects.append(gh_repo.get_label(label))
except UnknownObjectException:
# unknown label
return []
issues = gh_repo.get_issues(labels=label_objects, state="all")
if prs_only:
return [o for o in issues if o.pull_request]
    return list(issues)
def get_issue(repo: str, id_: int) -> Optional[Issue]:
gh_repo = get_repo(repo)
try:
return gh_repo.get_issue(id_)
except UnknownObjectException:
# unknown id
return None
def get_commits(repo: str, branch: str, since=None):
"""Get commit objects from a branch, over a limited period"""
gh_repo = get_repo(repo)
branch_object = gh_repo.get_branch(branch)
sha = branch_object.commit.sha
if since:
commits = gh_repo.get_commits(sha=sha, since=since)
else:
commits = gh_repo.get_commits(sha=sha)
return commits
def commit_pr_number(commit: Commit) -> Optional[int]:
"""Given a commit object, returns the PR number"""
res = PR_REGEX.search(commit.summary)
if res:
groups = res.groups()
return int(groups[1] or groups[2])
return None
def get_commit_pr_map(commits: Reversible[Commit]):
"""Given a list of commits and prs, returns a map of pr_number to commit"""
d = OrderedDict()
for commit in reversed(commits):
pr_number = commit_pr_number(commit)
if pr_number:
d[pr_number] = commit
return d
def truncate_str(value: str, width: int = 90) -> str:
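    # Sketch of behavior: truncate_str("a" * 100, width=10) returns "aaaaaaa...",
    # i.e. the value is cut to fit, suffixed with "...", and left-aligned to `width`.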
cont_str = "..."
trunc_value = value[: width - len(cont_str)].strip()
if len(trunc_value) < len(value.strip()):
trunc_value = f"{trunc_value}{cont_str}"
return f"{trunc_value:<{width}}"
def git_get_current_head() -> str:
output = os_system("git status | head -1")
match = re.match("(?:HEAD detached at|On branch) (.*)", output)
if not match:
return ""
return match.group(1)
def os_system(cmd, raise_on_error=True) -> str:
|
def check_if_branch_exists(branch: str) -> bool:
current_head = git_get_current_head()
try:
os_system(f"git checkout {branch}")
except CherryTreeExecutionException:
return False
os_system(f"git checkout {current_head}")
return True
def deduplicate_prs(prs: List[Issue]) -> List[Issue]:
pr_set = set()
ret: List[Issue] = []
for pr in prs:
if pr.number not in pr_set:
ret.append(pr)
pr_set.add(pr.number)
return ret
def get_git_repo() -> Repo:
"""
Find the path containing the git repo. Start by checking the current working
directory, and proceed up the directory tree if a git repo can't be found.
    returns: Repo object for the closest git repo
raises FileNotFoundError: if no git repo is found in the current path
"""
def _traverse_dirs(path: str) -> Generator[str, None, None]:
# first yield the current directory
yield path
# then start yielding parents until we reach the root
while True:
parent = os.path.dirname(path)
if path != parent:
yield parent
path = parent
else:
break
cwd = os.getcwd()
for dir_ in _traverse_dirs(cwd):
try:
repo = Repo(dir_)
return repo
except InvalidGitRepositoryError:
pass
raise FileNotFoundError("No git repo found in path: {}". format(cwd))
| p = delegator.run(cmd)
if raise_on_error and p.return_code != 0:
raise CherryTreeExecutionException(p.err)
return p.out |
lightning.py | # Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""nn.Module with additional great features."""
import collections
import copy
import inspect
import os
import re
import tempfile
from abc import ABC
from argparse import Namespace
from pathlib import Path
from typing import Any, Callable, Dict, List, Mapping, Optional, Sequence, Tuple, Union
import torch
from torch import ScriptModule, Tensor
from torch.nn import Module
from torch.optim.optimizer import Optimizer
from pytorch_lightning import _logger as log
from pytorch_lightning.core.grads import GradInformation
from pytorch_lightning.core.hooks import CheckpointHooks, DataHooks, ModelHooks
from pytorch_lightning.core.memory import ModelSummary
from pytorch_lightning.core.optimizer import LightningOptimizer
from pytorch_lightning.core.saving import ALLOWED_CONFIG_TYPES, ModelIO, PRIMITIVE_TYPES
from pytorch_lightning.core.step_result import Result
from pytorch_lightning.utilities import rank_zero_warn, TPU_AVAILABLE
from pytorch_lightning.utilities.device_dtype_mixin import DeviceDtypeModuleMixin
from pytorch_lightning.utilities.exceptions import MisconfigurationException
from pytorch_lightning.utilities.parsing import AttributeDict, collect_init_args, get_init_args
if TPU_AVAILABLE:
import torch_xla.core.xla_model as xm
class LightningModule(
ABC,
DeviceDtypeModuleMixin,
GradInformation,
ModelIO,
ModelHooks,
DataHooks,
CheckpointHooks,
Module,
):
    # Below is for property support of JIT in PyTorch 1.7.
    # Since none of these properties matters when using JIT, we ignore them.
__jit_unused_properties__ = [
"datamodule",
"example_input_array",
"hparams",
"hparams_initial",
"on_gpu",
"current_epoch",
"global_step",
] + DeviceDtypeModuleMixin.__jit_unused_properties__
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# see (https://github.com/pytorch/pytorch/blob/3e6bb5233f9ca2c5aa55d9cda22a7ee85439aa6e/
# torch/nn/modules/module.py#L227)
torch._C._log_api_usage_once(f"lightning.module.{self.__class__.__name__}")
self.exp_save_path = None
self.loaded_optimizer_states_dict = {}
#: Pointer to the trainer object
self.trainer = None
#: Pointer to the logger object
self.logger = None
#: True if using dp
self.use_dp = False
#: True if using ddp
self.use_ddp = False
#: True if using ddp2
self.use_ddp2 = False
        #: True if on tpu
self.use_tpu = False
#: True if using amp
self.use_amp = False
#: The precision used
self.precision = 32
# optionally can be set by user
self._example_input_array = None
self._datamodule = None
self._results: Optional[Result] = None
self._current_fx_name = ''
self._running_manual_backward = False
self._current_hook_fx_name = None
self._current_dataloader_idx = None
self._automatic_optimization: bool = True
def optimizers(self, use_pl_optimizer: bool = True) -> Union[Optimizer, List[Optimizer], List[LightningOptimizer]]:
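        """Returns the optimizer(s) registered with the trainer, either as
        ``LightningOptimizer`` wrappers (``use_pl_optimizer=True``) or as the
        raw optimizers; a single optimizer is returned unwrapped from the list.
        """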
if use_pl_optimizer:
opts = list(self.trainer.lightning_optimizers.values())
else:
opts = self.trainer.optimizers
# single optimizer
if isinstance(opts, list) and len(opts) == 1 and isinstance(opts[0], Optimizer):
return opts[0]
# multiple opts
return opts
@property
def example_input_array(self) -> Any:
return self._example_input_array
@property
def current_epoch(self) -> int:
"""The current epoch"""
return self.trainer.current_epoch if self.trainer else 0
@property
def global_step(self) -> int:
"""Total training batches seen across all epochs"""
return self.trainer.global_step if self.trainer else 0
@example_input_array.setter
def example_input_array(self, example: Any) -> None:
self._example_input_array = example
@property
def datamodule(self) -> Any:
return self._datamodule
@datamodule.setter
def datamodule(self, datamodule: Any) -> None:
self._datamodule = datamodule
@property
def on_gpu(self):
"""
True if your model is currently running on GPUs.
Useful to set flags around the LightningModule for different CPU vs GPU behavior.
"""
return self.device.type == "cuda"
@property
def automatic_optimization(self) -> bool:
"""
        If False, you are responsible for calling ``.backward()``, ``.step()`` and ``.zero_grad()``.
"""
return self._automatic_optimization
@automatic_optimization.setter
def automatic_optimization(self, automatic_optimization: bool) -> None:
self._automatic_optimization = automatic_optimization
def print(self, *args, **kwargs) -> None:
r"""
Prints only from process 0. Use this in any distributed mode to log only once.
Args:
*args: The thing to print. Will be passed to Python's built-in print function.
**kwargs: Will be passed to Python's built-in print function.
Example:
.. code-block:: python
def forward(self, x):
self.print(x, 'in forward')
"""
if self.trainer.is_global_zero:
print(*args, **kwargs)
def log(
self,
name: str,
value: Any,
prog_bar: bool = False,
logger: bool = True,
on_step: Optional[bool] = None,
on_epoch: Optional[bool] = None,
reduce_fx: Callable = torch.mean,
tbptt_reduce_fx: Callable = torch.mean,
tbptt_pad_token: int = 0,
enable_graph: bool = False,
sync_dist: bool = False,
sync_dist_op: Union[Any, str] = 'mean',
sync_dist_group: Optional[Any] = None,
):
"""
Log a key, value
Example::
self.log('train_loss', loss)
The default behavior per hook is as follows
.. csv-table:: ``*`` also applies to the test loop
:header: "LightningMoule Hook", "on_step", "on_epoch", "prog_bar", "logger"
:widths: 20, 10, 10, 10, 10
"training_step", "T", "F", "F", "T"
"training_step_end", "T", "F", "F", "T"
"training_epoch_end", "F", "T", "F", "T"
"validation_step*", "F", "T", "F", "T"
"validation_step_end*", "F", "T", "F", "T"
"validation_epoch_end*", "F", "T", "F", "T"
Args:
name: key name
value: value name
prog_bar: if True logs to the progress bar
logger: if True logs to the logger
on_step: if True logs at this step. None auto-logs at the training_step but not validation/test_step
on_epoch: if True logs epoch accumulated metrics. None auto-logs at the val/test step but not training_step
reduce_fx: reduction function over step values for end of epoch. Torch.mean by default
tbptt_reduce_fx: function to reduce on truncated back prop
tbptt_pad_token: token to use for padding
enable_graph: if True, will not auto detach the graph
sync_dist: if True, reduces the metric across GPUs/TPUs
sync_dist_op: the op to sync across GPUs/TPUs
sync_dist_group: the ddp group
"""
if self._results is not None:
# in any epoch end can't log step metrics (only epoch metric)
if 'epoch_end' in self._current_fx_name and on_step:
                m = f'on_step=True cannot be used in the {self._current_fx_name} method'
raise MisconfigurationException(m)
if 'epoch_end' in self._current_fx_name and on_epoch is False:
m = f'on_epoch cannot be False when called from the {self._current_fx_name} method'
raise MisconfigurationException(m)
# add log_dict
# TODO: if logged twice fail with crash
# set the default depending on the fx_name
on_step = self.__auto_choose_log_on_step(on_step)
on_epoch = self.__auto_choose_log_on_epoch(on_epoch)
if self._current_hook_fx_name is not None:
self.trainer.logger_connector.check_logging_in_callbacks(
self._current_hook_fx_name,
on_step=on_step,
on_epoch=on_epoch
)
# make sure user doesn't introduce logic for multi-dataloaders
if "/dataloader_idx_" in name:
raise MisconfigurationException(
f"Logged key: {name} should not contain information about dataloader_idx.")
accelerator = self.trainer.accelerator_backend
self._results.log(
name,
value,
prog_bar,
logger,
on_step,
on_epoch,
reduce_fx,
tbptt_reduce_fx,
tbptt_pad_token,
enable_graph,
sync_dist,
sync_dist_op,
sync_dist_group,
accelerator.sync_tensor,
self._current_dataloader_idx,
self.device,
)
def log_dict(
self,
dictionary: dict,
prog_bar: bool = False,
logger: bool = True,
on_step: Optional[bool] = None,
on_epoch: Optional[bool] = None,
reduce_fx: Callable = torch.mean,
tbptt_reduce_fx: Callable = torch.mean,
tbptt_pad_token: int = 0,
enable_graph: bool = False,
sync_dist: bool = False,
sync_dist_op: Union[Any, str] = 'mean',
sync_dist_group: Optional[Any] = None,
):
"""
        Log a dictionary of values at once
Example::
values = {'loss': loss, 'acc': acc, ..., 'metric_n': metric_n}
self.log_dict(values)
Args:
dictionary: key value pairs (str, tensors)
            prog_bar: if True logs to the progress bar
logger: if True logs to the logger
on_step: if True logs at this step. None auto-logs for training_step but not validation/test_step
on_epoch: if True logs epoch accumulated metrics. None auto-logs for val/test step but not training_step
reduce_fx: reduction function over step values for end of epoch. Torch.mean by default
tbptt_reduce_fx: function to reduce on truncated back prop
tbptt_pad_token: token to use for padding
enable_graph: if True, will not auto detach the graph
sync_dist: if True, reduces the metric across GPUs/TPUs
sync_dist_op: the op to sync across GPUs/TPUs
            sync_dist_group: the ddp group
"""
for k, v in dictionary.items():
self.log(
name=k,
value=v,
prog_bar=prog_bar,
logger=logger,
on_step=on_step,
on_epoch=on_epoch,
reduce_fx=reduce_fx,
enable_graph=enable_graph,
sync_dist=sync_dist,
sync_dist_group=sync_dist_group,
sync_dist_op=sync_dist_op,
tbptt_pad_token=tbptt_pad_token,
tbptt_reduce_fx=tbptt_reduce_fx,
)
def write_prediction(self, name, value, filename='predictions.pt'):
self.trainer.evaluation_loop.predictions._add_prediction(name, value, filename)
def write_prediction_dict(self, predictions_dict, filename='predictions.pt'):
for k, v in predictions_dict.items():
self.write_prediction(k, v, filename)
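    # The two helpers below choose logging defaults when `on_step`/`on_epoch`
    # are passed as None: training steps log per step, while evaluation steps
    # and *_epoch_end hooks log per epoch (see the table in `log`'s docstring).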
def __auto_choose_log_on_step(self, on_step):
if on_step is None:
if self._current_fx_name in {'training_step', 'training_step_end'}:
on_step = True
elif self._current_fx_name in {'evaluation_step', 'evaluation_step_end',
'evaluation_epoch_end', 'training_epoch_end'}:
on_step = False
else:
on_step = False
return on_step
def __auto_choose_log_on_epoch(self, on_epoch):
if on_epoch is None:
if self._current_fx_name in {'training_step', 'training_step_end'}:
on_epoch = False
elif self._current_fx_name in {'evaluation_step', 'evaluation_step_end',
'evaluation_epoch_end', 'training_epoch_end'}:
on_epoch = True
else:
on_epoch = True
return on_epoch
def all_gather(self, tensor: Union[torch.Tensor], group: Optional[Any] = None, sync_grads: bool = False):
r"""
Allows users to call ``self.all_gather()`` from the LightningModule, thus making
        the ``all_gather`` operation accelerator agnostic.
        ``all_gather`` is a function provided by accelerators to gather a tensor from several
distributed processes
Args:
tensor: tensor of shape (batch, ...)
group: the process group to gather results from. Defaults to all processes (world)
sync_grads: flag that allows users to synchronize gradients for all_gather op
Return:
A tensor of shape (world_size, batch, ...)
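        Example (illustrative sketch)::
            # gather a per-process tensor onto every process
            gathered = self.all_gather(some_tensor)  # shape (world_size, batch, ...)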
"""
return self.trainer.accelerator_backend.all_gather(tensor, group=group, sync_grads=sync_grads)
def forward(self, *args, **kwargs):
r"""
Same as :meth:`torch.nn.Module.forward()`, however in Lightning you want this to define
the operations you want to use for prediction (i.e.: on a server or as a feature extractor).
Normally you'd call ``self()`` from your :meth:`training_step` method.
This makes it easy to write a complex system for training with the outputs
you'd want in a prediction setting.
You may also find the :func:`~pytorch_lightning.core.decorators.auto_move_data` decorator useful
when using the module outside Lightning in a production setting.
Args:
*args: Whatever you decide to pass into the forward method.
**kwargs: Keyword arguments are also possible.
Return:
Predicted output
Examples:
.. code-block:: python
# example if we were using this model as a feature extractor
def forward(self, x):
feature_maps = self.convnet(x)
return feature_maps
def training_step(self, batch, batch_idx):
x, y = batch
feature_maps = self(x)
logits = self.classifier(feature_maps)
# ...
return loss
            # splitting it this way allows the model to be used as a feature extractor
model = MyModelAbove()
inputs = server.get_request()
results = model(inputs)
server.write_results(results)
# -------------
# This is in stark contrast to torch.nn.Module where normally you would have this:
def forward(self, batch):
x, y = batch
feature_maps = self.convnet(x)
logits = self.classifier(feature_maps)
return logits
"""
return super().forward(*args, **kwargs)
def training_step(self, *args, **kwargs):
r"""
Here you compute and return the training loss and some additional metrics for e.g.
the progress bar or logger.
Args:
batch (:class:`~torch.Tensor` | (:class:`~torch.Tensor`, ...) | [:class:`~torch.Tensor`, ...]):
The output of your :class:`~torch.utils.data.DataLoader`. A tensor, tuple or list.
batch_idx (int): Integer displaying index of this batch
optimizer_idx (int): When using multiple optimizers, this argument will also be present.
hiddens(:class:`~torch.Tensor`): Passed in if
:paramref:`~pytorch_lightning.trainer.trainer.Trainer.truncated_bptt_steps` > 0.
Return:
            Any of:
- :class:`~torch.Tensor` - The loss tensor
- `dict` - A dictionary. Can include any keys, but must include the key 'loss'
- `None` - Training will skip to the next batch
In this step you'd normally do the forward pass and calculate the loss for a batch.
You can also do fancier things like multiple forward passes or something model specific.
Example::
def training_step(self, batch, batch_idx):
x, y, z = batch
out = self.encoder(x)
loss = self.loss(out, x)
return loss
If you define multiple optimizers, this step will be called with an additional
``optimizer_idx`` parameter.
.. code-block:: python
# Multiple optimizers (e.g.: GANs)
def training_step(self, batch, batch_idx, optimizer_idx):
if optimizer_idx == 0:
# do training_step with encoder
if optimizer_idx == 1:
# do training_step with decoder
If you add truncated back propagation through time you will also get an additional
argument with the hidden states of the previous step.
.. code-block:: python
# Truncated back-propagation through time
def training_step(self, batch, batch_idx, hiddens):
# hiddens are the hidden states from the previous truncated backprop step
...
out, hiddens = self.lstm(data, hiddens)
...
return {'loss': loss, 'hiddens': hiddens}
Note:
The loss value shown in the progress bar is smoothed (averaged) over the last values,
so it differs from the actual loss returned in train/validation step.
"""
rank_zero_warn(
"`training_step` must be implemented to be used with the Lightning Trainer"
)
def training_step_end(self, *args, **kwargs):
"""
Use this when training with dp or ddp2 because :meth:`training_step`
will operate on only part of the batch. However, this is still optional
and only needed for things like softmax or NCE loss.
Note:
If you later switch to ddp or some other mode, this will still be called
so that you don't have to change your code
.. code-block:: python
# pseudocode
sub_batches = split_batches_for_dp(batch)
batch_parts_outputs = [training_step(sub_batch) for sub_batch in sub_batches]
training_step_end(batch_parts_outputs)
Args:
batch_parts_outputs: What you return in `training_step` for each batch part.
Return:
Anything
When using dp/ddp2 distributed backends, only a portion of the batch is inside the training_step:
.. code-block:: python
def training_step(self, batch, batch_idx):
# batch is 1/num_gpus big
x, y = batch
out = self(x)
                # softmax uses only a portion of the batch in the denominator
loss = self.softmax(out)
loss = nce_loss(loss)
return loss
If you wish to do something with all the parts of the batch, then use this method to do it:
.. code-block:: python
def training_step(self, batch, batch_idx):
# batch is 1/num_gpus big
x, y = batch
out = self.encoder(x)
return {'pred': out}
def training_step_end(self, training_step_outputs):
gpu_0_pred = training_step_outputs[0]['pred']
gpu_1_pred = training_step_outputs[1]['pred']
gpu_n_pred = training_step_outputs[n]['pred']
# this softmax now uses the full batch
loss = nce_loss([gpu_0_pred, gpu_1_pred, gpu_n_pred])
return loss
See Also:
See the :ref:`multi_gpu` guide for more details.
"""
def training_epoch_end(self, outputs: List[Any]) -> None:
"""
Called at the end of the training epoch with the outputs of all training steps.
Use this in case you need to do something with all the outputs for every training_step.
.. code-block:: python
# the pseudocode for these calls
train_outs = []
for train_batch in train_data:
out = training_step(train_batch)
train_outs.append(out)
training_epoch_end(train_outs)
Args:
outputs: List of outputs you defined in :meth:`training_step`, or if there are
multiple dataloaders, a list containing a list of outputs for each dataloader.
Return:
None
Note:
If this method is not overridden, this won't be called.
Example::
def training_epoch_end(self, training_step_outputs):
# do something with all training_step outputs
return result
With multiple dataloaders, ``outputs`` will be a list of lists. The outer list contains
one entry per dataloader, while the inner list contains the individual outputs of
each training step for that dataloader.
.. code-block:: python
def training_epoch_end(self, training_step_outputs):
for out in training_step_outputs:
# do something here
"""
def validation_step(self, *args, **kwargs):
r"""
Operates on a single batch of data from the validation set.
        In this step you might generate examples or calculate anything of interest like accuracy.
.. code-block:: python
# the pseudocode for these calls
val_outs = []
for val_batch in val_data:
out = validation_step(val_batch)
val_outs.append(out)
validation_epoch_end(val_outs)
Args:
batch (:class:`~torch.Tensor` | (:class:`~torch.Tensor`, ...) | [:class:`~torch.Tensor`, ...]):
The output of your :class:`~torch.utils.data.DataLoader`. A tensor, tuple or list.
batch_idx (int): The index of this batch
dataloader_idx (int): The index of the dataloader that produced this batch
(only if multiple val dataloaders used)
Return:
            Any of:
- Any object or value
- `None` - Validation will skip to the next batch
.. code-block:: python
# pseudocode of order
out = validation_step()
if defined('validation_step_end'):
out = validation_step_end(out)
out = validation_epoch_end(out)
.. code-block:: python
# if you have one val dataloader:
def validation_step(self, batch, batch_idx)
# if you have multiple val dataloaders:
def validation_step(self, batch, batch_idx, dataloader_idx)
Examples:
.. code-block:: python
# CASE 1: A single validation dataset
def validation_step(self, batch, batch_idx):
x, y = batch
# implement your own
out = self(x)
loss = self.loss(out, y)
# log 6 example images
# or generated text... or whatever
sample_imgs = x[:6]
grid = torchvision.utils.make_grid(sample_imgs)
self.logger.experiment.add_image('example_images', grid, 0)
# calculate acc
labels_hat = torch.argmax(out, dim=1)
val_acc = torch.sum(y == labels_hat).item() / (len(y) * 1.0)
# log the outputs!
self.log_dict({'val_loss': loss, 'val_acc': val_acc})
If you pass in multiple val dataloaders, :meth:`validation_step` will have an additional argument.
.. code-block:: python
# CASE 2: multiple validation dataloaders
def validation_step(self, batch, batch_idx, dataloader_idx):
# dataloader_idx tells you which dataset this is.
Note:
If you don't need to validate you don't need to implement this method.
Note:
When the :meth:`validation_step` is called, the model has been put in eval mode
and PyTorch gradients have been disabled. At the end of validation,
the model goes back to training mode and gradients are enabled.
"""
def validation_step_end(self, *args, **kwargs):
"""
Use this when validating with dp or ddp2 because :meth:`validation_step`
will operate on only part of the batch. However, this is still optional
and only needed for things like softmax or NCE loss.
Note:
If you later switch to ddp or some other mode, this will still be called
so that you don't have to change your code.
.. code-block:: python
# pseudocode
sub_batches = split_batches_for_dp(batch)
batch_parts_outputs = [validation_step(sub_batch) for sub_batch in sub_batches]
validation_step_end(batch_parts_outputs)
Args:
batch_parts_outputs: What you return in :meth:`validation_step`
for each batch part.
Return:
None or anything
.. code-block:: python
# WITHOUT validation_step_end
# if used in DP or DDP2, this batch is 1/num_gpus large
def validation_step(self, batch, batch_idx):
# batch is 1/num_gpus big
x, y = batch
out = self.encoder(x)
loss = self.softmax(out)
loss = nce_loss(loss)
self.log('val_loss', loss)
# --------------
# with validation_step_end to do softmax over the full batch
def validation_step(self, batch, batch_idx):
# batch is 1/num_gpus big
x, y = batch
out = self(x)
return out
def validation_step_end(self, val_step_outputs):
for out in val_step_outputs:
# do something with these
See Also:
See the :ref:`multi_gpu` guide for more details.
"""
def validation_epoch_end(self, outputs: List[Any]) -> None:
"""
Called at the end of the validation epoch with the outputs of all validation steps.
.. code-block:: python
# the pseudocode for these calls
val_outs = []
for val_batch in val_data:
out = validation_step(val_batch)
val_outs.append(out)
validation_epoch_end(val_outs)
Args:
outputs: List of outputs you defined in :meth:`validation_step`, or if there
are multiple dataloaders, a list containing a list of outputs for each dataloader.
Return:
None
Note:
If you didn't define a :meth:`validation_step`, this won't be called.
Examples:
With a single dataloader:
.. code-block:: python
def validation_epoch_end(self, val_step_outputs):
for out in val_step_outputs:
# do something
With multiple dataloaders, `outputs` will be a list of lists. The outer list contains
one entry per dataloader, while the inner list contains the individual outputs of
each validation step for that dataloader.
.. code-block:: python
def validation_epoch_end(self, outputs):
for dataloader_output_result in outputs:
dataloader_outs = dataloader_output_result.dataloader_i_outputs
self.log('final_metric', final_value)
"""
def test_step(self, *args, **kwargs):
r"""
Operates on a single batch of data from the test set.
In this step you'd normally generate examples or calculate anything of interest
such as accuracy.
.. code-block:: python
# the pseudocode for these calls
test_outs = []
for test_batch in test_data:
out = test_step(test_batch)
test_outs.append(out)
test_epoch_end(test_outs)
Args:
batch (:class:`~torch.Tensor` | (:class:`~torch.Tensor`, ...) | [:class:`~torch.Tensor`, ...]):
The output of your :class:`~torch.utils.data.DataLoader`. A tensor, tuple or list.
batch_idx (int): The index of this batch.
dataloader_idx (int): The index of the dataloader that produced this batch
(only if multiple test dataloaders used).
Return:
            Any of:
- Any object or value
- `None` - Testing will skip to the next batch
.. code-block:: python
# if you have one test dataloader:
def test_step(self, batch, batch_idx)
# if you have multiple test dataloaders:
def test_step(self, batch, batch_idx, dataloader_idx)
Examples:
.. code-block:: python
# CASE 1: A single test dataset
def test_step(self, batch, batch_idx):
x, y = batch
# implement your own
out = self(x)
loss = self.loss(out, y)
# log 6 example images
# or generated text... or whatever
sample_imgs = x[:6]
grid = torchvision.utils.make_grid(sample_imgs)
self.logger.experiment.add_image('example_images', grid, 0)
# calculate acc
labels_hat = torch.argmax(out, dim=1)
test_acc = torch.sum(y == labels_hat).item() / (len(y) * 1.0)
# log the outputs!
self.log_dict({'test_loss': loss, 'test_acc': test_acc})
If you pass in multiple test dataloaders, :meth:`test_step` will have an additional
argument.
.. code-block:: python
# CASE 2: multiple test dataloaders
def test_step(self, batch, batch_idx, dataloader_idx):
# dataloader_idx tells you which dataset this is.
Note:
If you don't need to test you don't need to implement this method.
Note:
When the :meth:`test_step` is called, the model has been put in eval mode and
PyTorch gradients have been disabled. At the end of the test epoch, the model goes back
to training mode and gradients are enabled.
"""
def test_step_end(self, *args, **kwargs):
"""
Use this when testing with dp or ddp2 because :meth:`test_step` will operate
on only part of the batch. However, this is still optional
and only needed for things like softmax or NCE loss.
Note:
If you later switch to ddp or some other mode, this will still be called
so that you don't have to change your code.
.. code-block:: python
# pseudocode
sub_batches = split_batches_for_dp(batch)
batch_parts_outputs = [test_step(sub_batch) for sub_batch in sub_batches]
test_step_end(batch_parts_outputs)
Args:
batch_parts_outputs: What you return in :meth:`test_step` for each batch part.
Return:
None or anything
.. code-block:: python
# WITHOUT test_step_end
# if used in DP or DDP2, this batch is 1/num_gpus large
def test_step(self, batch, batch_idx):
# batch is 1/num_gpus big
x, y = batch
out = self(x)
loss = self.softmax(out)
self.log('test_loss', loss)
# --------------
# with test_step_end to do softmax over the full batch
def test_step(self, batch, batch_idx):
# batch is 1/num_gpus big
x, y = batch
out = self.encoder(x)
return out
def test_step_end(self, output_results):
# this out is now the full size of the batch
all_test_step_outs = output_results.out
loss = nce_loss(all_test_step_outs)
self.log('test_loss', loss)
See Also:
See the :ref:`multi_gpu` guide for more details.
"""
def test_epoch_end(
self, outputs: List[Any]
) -> None:
"""
Called at the end of a test epoch with the output of all test steps.
.. code-block:: python
# the pseudocode for these calls
test_outs = []
for test_batch in test_data:
out = test_step(test_batch)
test_outs.append(out)
test_epoch_end(test_outs)
Args:
outputs: List of outputs you defined in :meth:`test_step_end`, or if there
are multiple dataloaders, a list containing a list of outputs for each dataloader
Return:
None
Note:
If you didn't define a :meth:`test_step`, this won't be called.
Examples:
With a single dataloader:
.. code-block:: python
def test_epoch_end(self, outputs):
# do something with the outputs of all test batches
all_test_preds = test_step_outputs.predictions
some_result = calc_all_results(all_test_preds)
self.log(some_result)
With multiple dataloaders, `outputs` will be a list of lists. The outer list contains
one entry per dataloader, while the inner list contains the individual outputs of
each test step for that dataloader.
.. code-block:: python
def test_epoch_end(self, outputs):
final_value = 0
for dataloader_outputs in outputs:
for test_step_out in dataloader_outputs:
# do something
final_value += test_step_out
self.log('final_metric', final_value)
"""
def configure_optimizers(
self,
):
r"""
Choose what optimizers and learning-rate schedulers to use in your optimization.
Normally you'd need one. But in the case of GANs or similar you might have multiple.
Return:
Any of these 6 options.
- Single optimizer.
- List or Tuple - List of optimizers.
- Two lists - The first list has multiple optimizers, the second a list of LR schedulers (or lr_dict).
- Dictionary, with an 'optimizer' key, and (optionally) a 'lr_scheduler'
key whose value is a single LR scheduler or lr_dict.
- Tuple of dictionaries as described, with an optional 'frequency' key.
- None - Fit will run without any optimizer.
Note:
The 'frequency' value is an int corresponding to the number of sequential batches
optimized with the specific optimizer. It should be given to none or to all of the optimizers.
There is a difference between passing multiple optimizers in a list,
and passing multiple optimizers in dictionaries with a frequency of 1:
In the former case, all optimizers will operate on the given batch in each optimization step.
In the latter, only one optimizer will operate on the given batch at every step.
The lr_dict is a dictionary which contains the scheduler and its associated configuration.
The default configuration is shown below.
.. code-block:: python
{
'scheduler': lr_scheduler, # The LR scheduler instance (required)
'interval': 'epoch', # The unit of the scheduler's step size
'frequency': 1, # The frequency of the scheduler
'reduce_on_plateau': False, # For ReduceLROnPlateau scheduler
'monitor': 'val_loss', # Metric for ReduceLROnPlateau to monitor
'strict': True, # Whether to crash the training if `monitor` is not found
'name': None, # Custom name for LearningRateMonitor to use
}
Only the ``scheduler`` key is required; the rest will be set to the defaults above.
Examples:
.. code-block:: python
# most cases
def configure_optimizers(self):
opt = Adam(self.parameters(), lr=1e-3)
return opt
# multiple optimizer case (e.g.: GAN)
def configure_optimizers(self):
generator_opt = Adam(self.model_gen.parameters(), lr=0.01)
discriminator_opt = Adam(self.model_disc.parameters(), lr=0.02)
return generator_opt, discriminator_opt
# example with learning rate schedulers
def configure_optimizers(self):
generator_opt = Adam(self.model_gen.parameters(), lr=0.01)
discriminator_opt = Adam(self.model_disc.parameters(), lr=0.02)
discriminator_sched = CosineAnnealing(discriminator_opt, T_max=10)
return [generator_opt, discriminator_opt], [discriminator_sched]
# example with step-based learning rate schedulers
def configure_optimizers(self):
gen_opt = Adam(self.model_gen.parameters(), lr=0.01)
dis_opt = Adam(self.model_disc.parameters(), lr=0.02)
gen_sched = {'scheduler': ExponentialLR(gen_opt, 0.99),
'interval': 'step'} # called after each training step
dis_sched = CosineAnnealing(dis_opt, T_max=10) # called every epoch
return [gen_opt, dis_opt], [gen_sched, dis_sched]
# example with optimizer frequencies
# see training procedure in `Improved Training of Wasserstein GANs`, Algorithm 1
# https://arxiv.org/abs/1704.00028
def configure_optimizers(self):
gen_opt = Adam(self.model_gen.parameters(), lr=0.01)
dis_opt = Adam(self.model_disc.parameters(), lr=0.02)
n_critic = 5
return (
{'optimizer': dis_opt, 'frequency': n_critic},
{'optimizer': gen_opt, 'frequency': 1}
)
Note:
Some things to know:
- Lightning calls ``.backward()`` and ``.step()`` on each optimizer
and learning rate scheduler as needed.
- If you use 16-bit precision (``precision=16``), Lightning will automatically
handle the optimizers for you.
- If you use multiple optimizers, :meth:`training_step` will have an additional
``optimizer_idx`` parameter.
- If you use LBFGS, Lightning handles the closure function automatically for you.
- If you use multiple optimizers, gradients will be calculated only
for the parameters of the current optimizer at each training step.
- If you need to control how often those optimizers step or override the
default ``.step()`` schedule, override the :meth:`optimizer_step` hook.
- If you only want to call a learning rate scheduler every ``x`` step or epoch,
or want to monitor a custom metric, you can specify these in a lr_dict:
.. code-block:: python
{
'scheduler': lr_scheduler,
'interval': 'step', # or 'epoch'
'monitor': 'val_f1',
'frequency': x,
}
"""
rank_zero_warn(
"`configure_optimizers` must be implemented to be used with the Lightning Trainer"
)
def manual_backward(self, loss: Tensor, optimizer: Optimizer, *args, **kwargs) -> None:
"""
Call this directly from your training_step when doing optimizations manually.
By using this, we can ensure that all the proper scaling (e.g. for 16-bit precision) has been done for you.
This function forwards all args to the .backward() call as well.
.. tip:: In manual mode we still automatically clip grads if Trainer(gradient_clip_val=x) is set
.. tip:: In manual mode we still automatically accumulate grad over batches if
Trainer(accumulate_grad_batches=x) is set and you use `optimizer.step()`
Example::
def training_step(...):
(opt_a, opt_b) = self.optimizers()
loss = ...
# automatically applies scaling, etc...
self.manual_backward(loss, opt_a)
opt_a.step()
"""
# make sure we're using manual opt
self._verify_is_manual_optimization('manual_backward')
# backward
self._running_manual_backward = True
self.trainer.train_loop.backward(loss, optimizer, -1, *args, **kwargs)
self._running_manual_backward = False
def backward(self, loss: Tensor, optimizer: Optimizer, optimizer_idx: int, *args, **kwargs) -> None:
"""
Override backward with your own implementation if you need to.
Args:
loss: Loss is already scaled by accumulated grads
optimizer: Current optimizer being used
optimizer_idx: Index of the current optimizer being used
Called to perform backward step.
Feel free to override as needed.
The loss passed in has already been scaled for accumulated gradients if requested.
Example::
def backward(self, loss, optimizer, optimizer_idx):
loss.backward()
"""
if self.trainer.train_loop.automatic_optimization or self._running_manual_backward:
loss.backward(*args, **kwargs)
def toggle_optimizer(self, optimizer: Optimizer, optimizer_idx: int):
"""
Makes sure only the gradients of the current optimizer's parameters are calculated
in the training step to prevent dangling gradients in multiple-optimizer setup.
.. note:: Only called when using multiple optimizers
Override for your own behavior
Args:
optimizer: The optimizer to toggle.
optimizer_idx: The index of the optimizer to toggle.
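A minimal override sketch (``self.backbone`` is a hypothetical sub-module
that should stay frozen no matter which optimizer is active):
Example::
    def toggle_optimizer(self, optimizer, optimizer_idx):
        super().toggle_optimizer(optimizer, optimizer_idx)
        for param in self.backbone.parameters():
            param.requires_grad = False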
"""
for param in self.parameters():
param.requires_grad = False
for group in optimizer.param_groups:
for param in group['params']:
param.requires_grad = True
def optimizer_step(
self,
epoch: Optional[int] = None,
batch_idx: Optional[int] = None,
optimizer: Optional[Optimizer] = None,
optimizer_idx: Optional[int] = None,
optimizer_closure: Optional[Callable] = None,
on_tpu: Optional[bool] = None,
using_native_amp: Optional[bool] = None,
using_lbfgs: Optional[bool] = None,
) -> None:
r"""
Override this method to adjust the default way the
:class:`~pytorch_lightning.trainer.trainer.Trainer` calls each optimizer.
By default, Lightning calls ``step()`` and ``zero_grad()`` as shown in the example
once per optimizer.
.. tip:: With `Trainer(enable_pl_optimizer=True)`, you can use `optimizer.step()` directly and it will handle zero_grad, accumulated gradients, AMP, TPU and more automatically for you.
Warning:
If you are overriding this method, make sure that you pass the ``optimizer_closure`` parameter
to ``optimizer.step()`` function as shown in the examples. This ensures that
``train_step_and_backward_closure`` is called within
:meth:`~pytorch_lightning.trainer.training_loop.TrainLoop.run_training_batch`.
Args:
epoch: Current epoch
batch_idx: Index of current batch
optimizer: A PyTorch optimizer
optimizer_idx: If you used multiple optimizers this indexes into that list.
optimizer_closure: closure for all optimizers
on_tpu: True if TPU backward is required
using_native_amp: True if using native amp
using_lbfgs: True if the matching optimizer is lbfgs
Examples:
.. code-block:: python
# DEFAULT
def optimizer_step(self, epoch, batch_idx, optimizer, optimizer_idx,
optimizer_closure, on_tpu, using_native_amp, using_lbfgs):
optimizer.step(closure=optimizer_closure)
# Alternating schedule for optimizer steps (i.e.: GANs)
def optimizer_step(self, epoch, batch_idx, optimizer, optimizer_idx,
optimizer_closure, on_tpu, using_native_amp, using_lbfgs):
# update generator opt every 2 steps
if optimizer_idx == 0:
if batch_idx % 2 == 0:
optimizer.step(closure=optimizer_closure)
optimizer.zero_grad()
# update discriminator opt every 4 steps
if optimizer_idx == 1:
if batch_idx % 4 == 0:
optimizer.step(closure=optimizer_closure)
optimizer.zero_grad()
# ...
# add as many optimizers as you want
Here's another example showing how to use this for more advanced things such as
learning rate warm-up:
.. code-block:: python
# learning rate warm-up
def optimizer_step(self, epoch, batch_idx, optimizer, optimizer_idx,
optimizer_closure, on_tpu, using_native_amp, using_lbfgs):
# warm up lr
if self.trainer.global_step < 500:
lr_scale = min(1., float(self.trainer.global_step + 1) / 500.)
for pg in optimizer.param_groups:
pg['lr'] = lr_scale * self.learning_rate
# update params
optimizer.step(closure=optimizer_closure)
optimizer.zero_grad()
"""
optimizer.step(closure=optimizer_closure)
def optimizer_zero_grad(
self, epoch: int, batch_idx: int, optimizer: Optimizer, optimizer_idx: int
):
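"""Override this to change how ``zero_grad()`` is called.
By default, Lightning zeroes the gradients of the given optimizer once per optimizer step.
"""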
optimizer.zero_grad()
def tbptt_split_batch(self, batch: Tensor, split_size: int) -> list:
r"""
When using truncated backpropagation through time, each batch must be split along the
time dimension. Lightning handles this by default, but for custom behavior override
this function.
Args:
batch: Current batch
split_size: The size of the split
Return:
List of batch splits. Each split will be passed to :meth:`training_step` to enable truncated
back propagation through time. The default implementation splits root level Tensors and
Sequences at dim=1 (i.e. time dim). It assumes that each time dim is the same length.
Examples:
.. code-block:: python
def tbptt_split_batch(self, batch, split_size):
splits = []
for t in range(0, time_dims[0], split_size):
batch_split = []
for i, x in enumerate(batch):
if isinstance(x, torch.Tensor):
split_x = x[:, t:t + split_size]
elif isinstance(x, collections.Sequence):
split_x = [None] * len(x)
for batch_idx in range(len(x)):
split_x[batch_idx] = x[batch_idx][t:t + split_size]
batch_split.append(split_x)
splits.append(batch_split)
return splits
Note:
Called in the training loop after
:meth:`~pytorch_lightning.callbacks.base.Callback.on_batch_start`
if :paramref:`~pytorch_lightning.trainer.Trainer.truncated_bptt_steps` > 0.
Each returned batch split is passed separately to :meth:`training_step`.
"""
time_dims = [
len(x[0])
for x in batch
if isinstance(x, (torch.Tensor, collections.Sequence))
]
assert len(time_dims) >= 1, "Unable to determine batch time dimension"
assert all(
x == time_dims[0] for x in time_dims
), "Batch time dimension length is ambiguous"
splits = []
for t in range(0, time_dims[0], split_size):
batch_split = []
for i, x in enumerate(batch):
if isinstance(x, torch.Tensor):
split_x = x[:, t: t + split_size]
elif isinstance(x, collections.Sequence):
split_x = [None] * len(x)
for batch_idx in range(len(x)):
split_x[batch_idx] = x[batch_idx][t: t + split_size]
batch_split.append(split_x)
splits.append(batch_split)
return splits
def summarize(self, mode: Optional[str] = ModelSummary.MODE_DEFAULT) -> Optional[ModelSummary]:
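"""Print a summary of this module's layers and return it.
Args:
    mode: One of the modes in ``ModelSummary.MODES``, or ``None`` to skip summarization.
Return:
    The :class:`ModelSummary`, or ``None`` if ``mode`` is ``None``.
"""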
model_summary = None
if mode in ModelSummary.MODES:
model_summary = ModelSummary(self, mode=mode)
log.info("\n" + str(model_summary))
elif mode is not None:
raise MisconfigurationException(
f"`mode` can be None, {', '.join(ModelSummary.MODES)}, got {mode}"
)
return model_summary
def freeze(self) -> None:
r"""
Freeze all params for inference.
Example:
.. code-block:: python
model = MyLightningModule(...)
model.freeze()
"""
for param in self.parameters():
param.requires_grad = False
self.eval()
def unfreeze(self) -> None:
"""
Unfreeze all parameters for training.
.. code-block:: python
model = MyLightningModule(...)
model.unfreeze()
"""
for param in self.parameters():
param.requires_grad = True
self.train()
def get_progress_bar_dict(self) -> Dict[str, Union[int, str]]:
r"""
Implement this to override the default items displayed in the progress bar.
By default it includes the average loss value, split index of BPTT (if used)
and the version of the experiment when using a logger.
.. code-block::
Epoch 1: 4%|▎ | 40/1095 [00:03<01:37, 10.84it/s, loss=4.501, v_num=10]
Here is an example how to override the defaults:
.. code-block:: python
def get_progress_bar_dict(self):
# don't show the version number
items = super().get_progress_bar_dict()
items.pop("v_num", None)
return items
Return:
Dictionary with the items to be displayed in the progress bar.
"""
# call .item() only once but store elements without graphs
running_train_loss = self.trainer.train_loop.running_loss.mean()
avg_training_loss = None
if running_train_loss is not None:
avg_training_loss = running_train_loss.cpu().item()
elif self.trainer.train_loop.automatic_optimization:
avg_training_loss = float('NaN')
tqdm_dict = {}
if avg_training_loss is not None:
tq | if self.trainer.truncated_bptt_steps is not None:
tqdm_dict["split_idx"] = self.trainer.split_idx
if self.trainer.logger is not None and self.trainer.logger.version is not None:
version = self.trainer.logger.version
# show last 4 places of long version strings
version = version[-4:] if isinstance(version, str) else version
tqdm_dict["v_num"] = version
return tqdm_dict
def _verify_is_manual_optimization(self, fn_name):
if self.trainer.train_loop.automatic_optimization:
raise MisconfigurationException(
f'to use {fn_name}, please disable automatic optimization:'
' set model property `automatic_optimization` as False'
)
@classmethod
def _auto_collect_arguments(cls, frame=None) -> Tuple[Dict, Dict]:
"""
Collect all module arguments in the current constructor and all child constructors.
The child constructors are all the ``__init__`` methods that reach the current class through
(chained) ``super().__init__()`` calls.
Args:
frame: instance frame
Returns:
self_arguments: arguments dictionary of the first instance
parents_arguments: arguments dictionary of the parent's instances
"""
if not frame:
frame = inspect.currentframe()
frame_args = collect_init_args(frame.f_back, [])
self_arguments = frame_args[-1]
parents_arguments = {}
# add all arguments from parents
for args in frame_args[:-1]:
parents_arguments.update(args)
return self_arguments, parents_arguments
def save_hyperparameters(self, *args, frame=None) -> None:
"""Save all model arguments.
Args:
args: a single object of `dict`, `Namespace` or `OmegaConf` type,
or string names of arguments from the class `__init__`
>>> from collections import OrderedDict
>>> class ManuallyArgsModel(LightningModule):
... def __init__(self, arg1, arg2, arg3):
... super().__init__()
... # manually assign arguments
... self.save_hyperparameters('arg1', 'arg3')
... def forward(self, *args, **kwargs):
... ...
>>> model = ManuallyArgsModel(1, 'abc', 3.14)
>>> model.hparams
"arg1": 1
"arg3": 3.14
>>> class AutomaticArgsModel(LightningModule):
... def __init__(self, arg1, arg2, arg3):
... super().__init__()
... # equivalent automatic
... self.save_hyperparameters()
... def forward(self, *args, **kwargs):
... ...
>>> model = AutomaticArgsModel(1, 'abc', 3.14)
>>> model.hparams
"arg1": 1
"arg2": abc
"arg3": 3.14
>>> class SingleArgModel(LightningModule):
... def __init__(self, params):
... super().__init__()
... # manually assign single argument
... self.save_hyperparameters(params)
... def forward(self, *args, **kwargs):
... ...
>>> model = SingleArgModel(Namespace(p1=1, p2='abc', p3=3.14))
>>> model.hparams
"p1": 1
"p2": abc
"p3": 3.14
"""
if not frame:
frame = inspect.currentframe().f_back
init_args = get_init_args(frame)
assert init_args, "failed to inspect the self init"
if not args:
# take all arguments
hp = init_args
self._hparams_name = "kwargs" if hp else None
else:
# take only listed arguments in `save_hparams`
isx_non_str = [i for i, arg in enumerate(args) if not isinstance(arg, str)]
if len(isx_non_str) == 1:
hp = args[isx_non_str[0]]
cand_names = [k for k, v in init_args.items() if v == hp]
self._hparams_name = cand_names[0] if cand_names else None
else:
hp = {arg: init_args[arg] for arg in args if isinstance(arg, str)}
self._hparams_name = "kwargs"
# `hparams` are expected here
if hp:
self._set_hparams(hp)
# make deep copy so there is not other runtime changes reflected
self._hparams_initial = copy.deepcopy(self._hparams)
def _set_hparams(self, hp: Union[dict, Namespace, str]) -> None:
if isinstance(hp, Namespace):
hp = vars(hp)
if isinstance(hp, dict):
hp = AttributeDict(hp)
elif isinstance(hp, PRIMITIVE_TYPES):
raise ValueError(f"Primitives {PRIMITIVE_TYPES} are not allowed.")
elif not isinstance(hp, ALLOWED_CONFIG_TYPES):
raise ValueError(f"Unsupported config type of {type(hp)}.")
if isinstance(hp, dict) and isinstance(self.hparams, dict):
self.hparams.update(hp)
else:
self._hparams = hp
@torch.no_grad()
def to_onnx(
self,
file_path: Union[str, Path],
input_sample: Optional[Any] = None,
**kwargs,
):
"""
Saves the model in ONNX format
Args:
file_path: The path of the file the onnx model should be saved to.
input_sample: An input for tracing. Default: None (Use self.example_input_array)
**kwargs: Will be passed to torch.onnx.export function.
Example:
>>> class SimpleModel(LightningModule):
... def __init__(self):
... super().__init__()
... self.l1 = torch.nn.Linear(in_features=64, out_features=4)
...
... def forward(self, x):
... return torch.relu(self.l1(x.view(x.size(0), -1)))
>>> with tempfile.NamedTemporaryFile(suffix='.onnx', delete=False) as tmpfile:
... model = SimpleModel()
... input_sample = torch.randn((1, 64))
... model.to_onnx(tmpfile.name, input_sample, export_params=True)
... os.path.isfile(tmpfile.name)
True
"""
mode = self.training
if input_sample is None:
if self.example_input_array is None:
raise ValueError(
"Could not export to ONNX since neither `input_sample` nor"
" `model.example_input_array` attribute is set."
)
input_sample = self.example_input_array
input_sample = self.transfer_batch_to_device(input_sample)
if "example_outputs" not in kwargs:
self.eval()
kwargs["example_outputs"] = self(input_sample)
torch.onnx.export(self, input_sample, file_path, **kwargs)
self.train(mode)
@torch.no_grad()
def to_torchscript(
self,
file_path: Optional[Union[str, Path]] = None,
method: Optional[str] = 'script',
example_inputs: Optional[Any] = None,
**kwargs,
) -> Union[ScriptModule, Dict[str, ScriptModule]]:
"""
By default compiles the whole model to a :class:`~torch.jit.ScriptModule`.
If you want to use tracing, please provide the argument `method='trace'` and make sure that either the
example_inputs argument is provided, or the model has self.example_input_array set.
If you would like to customize the modules that are scripted you should override this method.
In case you want to return multiple modules, we recommend using a dictionary.
Args:
file_path: Path where to save the torchscript. Default: None (no file saved).
method: Whether to use TorchScript's script or trace method. Default: 'script'
example_inputs: An input to be used to do tracing when method is set to 'trace'.
Default: None (Use self.example_input_array)
**kwargs: Additional arguments that will be passed to the :func:`torch.jit.script` or
:func:`torch.jit.trace` function.
Note:
- Requires the implementation of the
:meth:`~pytorch_lightning.core.lightning.LightningModule.forward` method.
- The exported script will be set to evaluation mode.
- It is recommended that you install the latest supported version of PyTorch
to use this feature without limitations. See also the :mod:`torch.jit`
documentation for supported features.
Example:
>>> class SimpleModel(LightningModule):
... def __init__(self):
... super().__init__()
... self.l1 = torch.nn.Linear(in_features=64, out_features=4)
...
... def forward(self, x):
... return torch.relu(self.l1(x.view(x.size(0), -1)))
...
>>> model = SimpleModel()
>>> torch.jit.save(model.to_torchscript(), "model.pt") # doctest: +SKIP
>>> os.path.isfile("model.pt") # doctest: +SKIP
>>> model.to_torchscript(file_path="model_trace.pt", method='trace', # doctest: +SKIP
...                      example_inputs=torch.randn(1, 64)) # doctest: +SKIP
>>> os.path.isfile("model_trace.pt") # doctest: +SKIP
True
Return:
This LightningModule as a torchscript, regardless of whether file_path is
defined or not.
"""
mode = self.training
if method == 'script':
torchscript_module = torch.jit.script(self.eval(), **kwargs)
elif method == 'trace':
# if no example inputs are provided, try to see if model has example_input_array set
if example_inputs is None:
if self.example_input_array is None:
raise ValueError(
'Choosing method=`trace` requires either `example_inputs`'
' or `model.example_input_array` to be defined'
)
example_inputs = self.example_input_array
# automatically send example inputs to the right device and use trace
example_inputs = self.transfer_batch_to_device(example_inputs)
torchscript_module = torch.jit.trace(func=self.eval(), example_inputs=example_inputs, **kwargs)
else:
raise ValueError("The 'method' parameter only supports 'script' or 'trace',"
f" but value given was: {method}")
self.train(mode)
if file_path is not None:
torch.jit.save(torchscript_module, file_path)
return torchscript_module
@property
def hparams(self) -> Union[AttributeDict, dict, Namespace]:
if not hasattr(self, "_hparams"):
self._hparams = AttributeDict()
return self._hparams
@property
def hparams_initial(self) -> AttributeDict:
if not hasattr(self, "_hparams_initial"):
return AttributeDict()
# prevent any change
return copy.deepcopy(self._hparams_initial)
@hparams.setter
def hparams(self, hp: Union[dict, Namespace, Any]):
# TODO: remove this method in v1.3.0.
rank_zero_warn(
"The setter for self.hparams in LightningModule is deprecated since v1.1.0 and will be"
" removed in v1.3.0. Replace the assignment `self.hparams = hparams` with"
" `self.save_hyperparameters()`.",
DeprecationWarning
)
hparams_assignment_name = self.__get_hparams_assignment_variable()
self._hparams_name = hparams_assignment_name
self._set_hparams(hp)
# this resolves the case when the user does not use `save_hyperparameters` and assigns hparams directly in init
if not hasattr(self, "_hparams_initial"):
self._hparams_initial = copy.deepcopy(self._hparams)
def __get_hparams_assignment_variable(self):
"""
Looks at the code of the class to figure out what the user named self.hparams.
This only happens when the user explicitly sets self.hparams.
"""
try:
class_code = inspect.getsource(self.__class__)
lines = class_code.split("\n")
for line in lines:
line = re.sub(r"\s+", "", line, flags=re.UNICODE)
if ".hparams=" in line:
return line.split("=")[1]
except Exception:
return "hparams"
return None
| dm_dict["loss"] = f"{avg_training_loss:.3g}"
|
program.go | package xgp
import (
"encoding/json"
"errors"
"github.com/MaxHalford/xgp/metrics"
"github.com/MaxHalford/xgp/op"
"github.com/gonum/floats"
)
// A Program is a thin layer on top of an Operator.
type Program struct {
*GP
Op op.Operator
}
// String formatting.
func (prog Program) String() string {
return prog.Op.String()
}
// classification determines whether the Program has to perform classification.
// It does so by looking at the GP's LossMetric.
func (prog Program) classification() bool {
if prog.GP != nil {
if prog.GP.LossMetric != nil {
return prog.GP.LossMetric.Classification()
}
}
return false
}
// Predict predicts the output of a slice of features.
func (prog Program) Predict(X [][]float64, proba bool) ([]float64, error) {
// Make predictions
yPred := prog.Op.Eval(X)
// Check the predictions don't contain any NaNs
if floats.HasNaN(yPred) {
return nil, errors.New("yPred contains NaNs")
}
// Regression
if !prog.classification() {
return yPred, nil
}
// Classification
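// Choose the output transform up front: a sigmoid when probabilities are
// requested, a 0/1 threshold otherwise.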
var transform = map[bool]func(float64) float64{true: sigmoid, false: binary}[proba]
for i, y := range yPred {
yPred[i] = transform(y)
}
return yPred, nil
}
// PredictPartial is a convenience function on top of Predict to make
// predictions on a single instance.
func (prog Program) PredictPartial(x []float64, proba bool) (float64, error) {
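	// Predict expects feature-major input: X[i] holds the values of
	// feature i across samples, so a single instance becomes one
	// single-element slice per feature.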
var X = make([][]float64, len(x))
for i, xi := range x {
X[i] = []float64{xi}
}
yPred, err := prog.Predict(X, proba)
if err != nil {
return 0, err
}
return yPred[0], nil
}
type serialProgram struct {
Op op.SerialOp `json:"op"`
LossMetric string `json:"loss_metric"`
}
// MarshalJSON serializes a Program.
func (prog Program) MarshalJSON() ([]byte, error) {
return json.Marshal(&serialProgram{
Op: op.SerializeOp(prog.Op),
LossMetric: prog.GP.LossMetric.String(),
})
}
// UnmarshalJSON parses a Program.
func (prog *Program) UnmarshalJSON(bytes []byte) error {
var serial = &serialProgram{}
if err := json.Unmarshal(bytes, serial); err != nil |
loss, err := metrics.ParseMetric(serial.LossMetric, 1)
if err != nil {
return err
}
operator, err := op.ParseOp(serial.Op)
if err != nil {
return err
}
prog.Op = operator
prog.GP = &GP{LossMetric: loss}
return nil
}
| {
return err
} |
input.rs | use log::info;
use mercat_common::{gen_seed, save_config};
use serde::{Deserialize, Serialize};
use std::path::PathBuf;
use structopt::StructOpt;
#[derive(Clone, Debug, Serialize, Deserialize, StructOpt)]
pub struct CreateAccountInfo {
/// The name of the user. The name can be any valid string that can be used as a file name.
/// It is the responsibility of the caller to ensure the uniqueness of the name.
#[structopt(short, long, help = "The name of the user. This name must be unique.")]
pub user: String,
/// The directory that will serve as the database of the on/off-chain data and will be used
/// to save and load the data that in a real execution would be written on or off the
/// blockchain. Defaults to the current directory. This directory will have two main
/// sub-directories: `on-chain` and `off-chain`.
#[structopt(
parse(from_os_str),
help = "The directory to load and save the input and output files. Defaults to current directory.",
short,
long
)]
pub db_dir: Option<PathBuf>,
/// An asset ticker name which is a string of at most 12 characters.
/// In these test CLIs, the unique account id is created from the pair of username and ticker.
#[structopt(
short,
long,
help = "The asset ticker name. String of at most 12 characters."
)]
pub ticker: String,
/// An optional seed, to feed to the RNG, that can be passed to reproduce a previous run of this CLI.
/// The seed can be found inside the logs.
#[structopt(
long,
help = "Base64 encoding of an initial seed for the RNG. If not provided, the seed will be chosen at random."
)]
pub seed: Option<String>,
/// An optional path to save the config used for this experiment.
#[structopt(
parse(from_os_str),
long,
help = "Path to save the input command line arguments as a config file."
)]
pub save_config: Option<PathBuf>,
/// Instructs the CLI to act as a cheater.
#[structopt(long, help = "Instructs the CLI to act as a cheater.")]
pub cheat: bool,
/// Transaction id.
#[structopt(long, help = "Transaction id.")]
pub tx_id: u32,
/// Instructs the CLI to print the transaction data in stdout.
#[structopt(
long,
help = "Instructs the CLI to print the transaction data in stdout."
)]
pub stdout: bool,
}
#[derive(Clone, Debug, Serialize, Deserialize, StructOpt)]
pub struct DecryptAccountInfo {
/// The name of the user. The name can be any valid string that can be used as a file name.
#[structopt(short, long, help = "The name of the user. This name must be unique.")]
pub user: String,
/// The directory that will serve as the database of the on/off-chain data and will be used
/// to save and load the data that in a real execution would be written on or off the
/// blockchain. Defaults to the current directory. This directory will have two main
/// sub-directories: `on-chain` and `off-chain`.
#[structopt(
parse(from_os_str),
help = "The directory to load and save the input and output files. Defaults to current directory.",
short,
long
)]
pub db_dir: Option<PathBuf>,
/// An asset ticker name which is a string of at most 12 characters.
/// In these test CLIs, the unique account id is created from the pair of username and ticker.
#[structopt(
short,
long,
help = "The asset ticker name. String of at most 12 characters."
)]
pub ticker: String,
}
#[derive(Clone, Debug, Serialize, Deserialize, StructOpt)]
pub struct IssueAssetInfo {
/// Account ID of the issuer will be generated from the username and ticker name pair.
#[structopt(
long,
help = "The ticker name that will be used to generate the unique account id of the user."
)]
pub account_id_from_ticker: String,
/// A transaction ID for the asset issuance transaction.
/// The CLI will not throw any errors if a duplicate id is passed.
/// It will silently overwrite the transaction.
#[structopt(long, help = "The transaction ID.")]
pub tx_id: u32,
/// An optional seed, to feed to the RNG, that can be passed to reproduce a previous run of this CLI.
/// The seed can be found inside the logs.
#[structopt(
long,
help = "Base64 encoding of an initial seed for the RNG. If not provided, the seed will be chosen at random."
)]
pub seed: Option<String>,
/// Amount to issue.
#[structopt(short, long, help = "The amount of assets to issue.")]
pub amount: u32,
/// The directory that will serve as the database of the on/off-chain data and will be used
/// to save and load the data that in a real execution would be written on or off the
/// blockchain. Defaults to the current directory. This directory will have two main
/// sub-directories: `on-chain` and `off-chain`.
#[structopt(
parse(from_os_str),
help = "The directory to load and save the input and output files. Defaults to current directory.",
short,
long
)]
pub db_dir: Option<PathBuf>,
/// The issuer's name. An account must have already been created for this user.
#[structopt(short, long, help = "The name of the issuer.")]
pub issuer: String,
/// An optional path to save the config used for this experiment.
#[structopt(
parse(from_os_str),
long,
help = "Path to save the input command line arguments as a config file."
)]
pub save_config: Option<PathBuf>,
/// Instructs the CLI to print the transaction data in stdout.
#[structopt(
long,
help = "Instructs the CLI to print the transaction data in stdout."
)]
pub stdout: bool,
/// Instructs the CLI to act as a cheater.
#[structopt(long, help = "Instructs the CLI to act as a cheater.")]
pub cheat: bool,
}
#[derive(Clone, Debug, Serialize, Deserialize, StructOpt)]
pub struct CreateTransactionInfo {
/// Account ID of the issuer will be generated from the username and ticker name pair.
#[structopt(
long,
help = "The ticker name that will be used to generate the unique account id of the user."
)]
pub account_id_from_ticker: String,
/// A transaction ID for the transaction.
/// The CLI will not throw any errors if a duplicate id is passed.
/// It will silently overwrite the transaction.
#[structopt(long, help = "The transaction ID.")]
pub tx_id: u32,
/// An optional seed, to feed to the RNG, that can be passed to reproduce a previous run of this CLI.
/// The seed can be found inside the logs.
#[structopt(
long,
help = "Base64 encoding of an initial seed for the RNG. If not provided, the seed will be chosen at random."
)]
pub seed: Option<String>,
/// Amount to transfer.
#[structopt(short, long, help = "The amount of assets to transfer.")]
pub amount: u32,
/// The directory that will serve as the database of the on/off-chain data and will be used
/// to save and load the data that in a real execution would be written on or off the
/// blockchain. Defaults to the current directory. This directory will have two main
/// sub-directories: `on-chain` and `off-chain`.
#[structopt(
parse(from_os_str),
help = "The directory to load and save the input and output files. Defaults to current directory.",
short,
long
)]
pub db_dir: Option<PathBuf>,
/// The sender's name. An account must have already been created for this user.
#[structopt(long, help = "The sender's name.")]
pub sender: String,
/// The receiver's name. An account must have already been created for this user.
#[structopt(short, long, help = "The sender's name.")]
pub receiver: String,
/// The transaction mediator's name. Used to retrieve mediator's public keys.
/// Use `mercat-mediator` CLI to create the credentials needed for this role.
#[structopt(short, long, help = "The mediator's name.")]
pub mediator: String,
/// An optional path to save the config used for this experiment.
#[structopt(
parse(from_os_str),
long,
help = "Path to save the input command line arguments as a config file."
)]
pub save_config: Option<PathBuf>,
/// Instructs the CLI to print the transaction data in stdout.
#[structopt(
long,
help = "Instructs the CLI to print the transaction data in stdout."
)]
pub stdout: bool,
/// Instructs the CLI to act as a cheater.
#[structopt(long, help = "Instructs the CLI to act as a cheater.")]
pub cheat: bool,
}
#[derive(Clone, Debug, Serialize, Deserialize, StructOpt)]
pub struct FinalizeTransactionInfo {
/// Account ID of the receiver will be generated from the username and ticker name pair.
#[structopt(
long,
help = "The ticker name that will be used to generate the unique account id of the user."
)]
pub account_id_from_ticker: String,
/// The transaction ID for the transaction.
/// The CLI will not throw any errors if a duplicate id is passed.
/// It will silently overwrite the transaction.
#[structopt(long, help = "The transaction ID.")]
pub tx_id: u32,
/// An optional seed, to feed to the RNG, that can be passed to reproduce a previous run of this CLI.
/// The seed can be found inside the logs.
#[structopt(
long,
help = "Base64 encoding of an initial seed for the RNG. If not provided, the seed will be chosen at random."
)]
pub seed: Option<String>,
/// The expected amount to receive.
#[structopt(short, long, help = "The expected amount to receive.")]
pub amount: u32,
/// The directory that will serve as the database of the on/off-chain data and will be used
/// to save and load the data that in a real execution would be written on or off the
/// blockchain. Defaults to the current directory. This directory will have two main
/// sub-directories: `on-chain` and `off-chain`.
#[structopt(
parse(from_os_str),
help = "The directory to load and save the input and output files. Defaults to current directory.",
short,
long
)]
pub db_dir: Option<PathBuf>,
// TODO(CRYP-110)
// Depending on how we decide to name transaction files, we may or may not need the sender's name.
/// The sender's name. An account must have already been created for this user.
#[structopt(long, help = "The sender's name.")]
pub sender: String,
/// The receiver's name. An account must have already been created for this user.
#[structopt(short, long, help = "The sender's name.")]
pub receiver: String,
/// An optional path to save the config used for this experiment.
#[structopt(
parse(from_os_str),
long,
help = "Path to save the input command line arguments as a config file."
)]
pub save_config: Option<PathBuf>,
/// Instructs the CLI to print the transaction data in stdout.
#[structopt(
long,
help = "Instructs the CLI to print the transaction data in stdout."
)]
pub stdout: bool,
/// Instructs the CLI to act as a cheater.
#[structopt(long, help = "Instructs the CLI to act as a cheater.")]
pub cheat: bool,
}
#[derive(Clone, Debug, Serialize, Deserialize, StructOpt)]
pub enum CLI {
/// Create a MERCAT account using command line arguments.
Create(CreateAccountInfo),
/// Create a MERCAT account from a config file.
CreateFrom {
/// The path to the config file. This is a positional argument.
config: PathBuf,
},
/// Issue an asset to a MERCAT account.
Issue(IssueAssetInfo),
/// Create a MERCAT transaction.
CreateTransaction(CreateTransactionInfo),
/// Finalize a MERCAT transaction.
FinalizeTransaction(FinalizeTransactionInfo),
/// Decrypt the account balance.
Decrypt(DecryptAccountInfo),
}
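// A hypothetical invocation, assuming the compiled binary is named
// `mercat-account` (structopt derives kebab-case subcommands and flags):
//
//   mercat-account create --user alice --ticker ACME --tx-id 1
//   mercat-account decrypt --user alice --ticker ACME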
pub fn parse_input() -> CLI | {
info!("Parsing input configuration.");
let args: CLI = CLI::from_args();
match args {
CLI::Create(cfg) => {
let db_dir = cfg.db_dir.clone().or_else(|| std::env::current_dir().ok());
let seed: Option<String> = cfg.seed.clone().or_else(|| Some(gen_seed()));
info!("Seed: {:?}", seed.clone().unwrap()); // unwrap won't panic
let cfg = CreateAccountInfo {
save_config: cfg.save_config.clone(),
seed,
ticker: cfg.ticker,
db_dir,
user: cfg.user.clone(),
cheat: cfg.cheat,
tx_id: cfg.tx_id,
stdout: cfg.stdout,
};
info!(
"Parsed the following config from the command line:\n{:#?}",
cfg
);
// Save the config if the argument is passed.
save_config(cfg.save_config.clone(), &cfg);
CLI::Create(cfg)
}
CLI::CreateFrom { config } => {
let json_file_content = std::fs::read_to_string(&config).unwrap_or_else(|_| {
panic!("Failed to read the account config from file: {:?}.", config)
});
let cfg = serde_json::from_str(&json_file_content).unwrap_or_else(|error| {
panic!("Failed to deserialize the account config: {}", error)
});
info!("Read the following config from {:?}:\n{:#?}", &config, &cfg);
CLI::Create(cfg)
}
CLI::Decrypt(cfg) => {
let db_dir = cfg.db_dir.clone().or_else(|| std::env::current_dir().ok());
let cfg = DecryptAccountInfo {
ticker: cfg.ticker,
db_dir,
user: cfg.user,
};
info!(
"Parsed the following config from the command line:\n{:#?}",
cfg
);
CLI::Decrypt(cfg)
}
CLI::Issue(cfg) => {
let db_dir = cfg.db_dir.clone().or_else(|| std::env::current_dir().ok());
let seed: Option<String> = cfg.seed.clone().or_else(|| Some(gen_seed()));
info!("Seed: {:?}", seed.clone().unwrap()); // unwrap won't panic
let cfg = IssueAssetInfo {
account_id_from_ticker: cfg.account_id_from_ticker,
tx_id: cfg.tx_id,
seed,
amount: cfg.amount,
db_dir,
issuer: cfg.issuer,
save_config: cfg.save_config.clone(),
stdout: cfg.stdout,
cheat: cfg.cheat,
};
info!(
"Parsed the following config from the command line:\n{:#?}",
cfg
);
// Save the config if the argument is passed.
save_config(cfg.save_config.clone(), &cfg);
CLI::Issue(cfg)
}
CLI::CreateTransaction(cfg) => {
let db_dir = cfg.db_dir.clone().or_else(|| std::env::current_dir().ok());
let seed: Option<String> = cfg.seed.clone().or_else(|| Some(gen_seed()));
info!("Seed: {:?}", seed.clone().unwrap());
let cfg = CreateTransactionInfo {
account_id_from_ticker: cfg.account_id_from_ticker,
tx_id: cfg.tx_id,
seed,
amount: cfg.amount,
db_dir,
sender: cfg.sender,
receiver: cfg.receiver,
mediator: cfg.mediator,
save_config: cfg.save_config.clone(),
stdout: cfg.stdout,
cheat: cfg.cheat,
};
info!(
"Parsed the following config from the command line:\n{:#?}",
cfg
);
// Save the config if the argument is passed.
save_config(cfg.save_config.clone(), &cfg);
CLI::CreateTransaction(cfg)
}
CLI::FinalizeTransaction(cfg) => {
let db_dir = cfg.db_dir.clone().or_else(|| std::env::current_dir().ok());
let seed: Option<String> = cfg.seed.clone().or_else(|| Some(gen_seed()));
info!("Seed: {:?}", seed.clone().unwrap());
let cfg = FinalizeTransactionInfo {
tx_id: cfg.tx_id,
account_id_from_ticker: cfg.account_id_from_ticker,
seed,
amount: cfg.amount,
db_dir,
sender: cfg.sender,
receiver: cfg.receiver,
save_config: cfg.save_config.clone(),
stdout: cfg.stdout,
cheat: cfg.cheat,
};
info!(
"Parsed the following config from the command line:\n{:#?}",
cfg
);
// Save the config if the argument is passed.
save_config(cfg.save_config.clone(), &cfg);
CLI::FinalizeTransaction(cfg)
}
}
} |
|
build.rs | use std::env;
fn main() | {
let target = env::var("TARGET").unwrap();
let android = target.contains("android");
// Export shared libraries search path.
if android {
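        // Map the target triple to the Android ABI directory shipped with the VrApi SDK.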
let abi = {
if target.contains("aarch64") { "arm64-v8a" }
else { "armeabi-v7a" }
};
println!("cargo:rustc-link-search={}/VrApi-1.40/Libs/Android/{}/Release", env!("CARGO_MANIFEST_DIR"), abi);
println!("cargo:rustc-link-lib=dylib=vrapi");
}
} |
|
studio_mode.rs | #![cfg(feature = "test-integration")]
use anyhow::Result;
use obws::requests::Transition;
use time::Duration;
use crate::common::{TEST_SCENE_2, TEST_TRANSITION};
mod common;
#[tokio::test]
async fn main() -> Result<()> | {
let client = common::new_client().await?;
let client = client.studio_mode();
client.get_studio_mode_status().await?;
client.enable_studio_mode().await?;
let original = client.get_preview_scene().await?.name;
client.set_preview_scene(TEST_SCENE_2).await?;
client.set_preview_scene(&original).await?;
client
.transition_to_program(Some(Transition {
name: TEST_TRANSITION,
duration: Some(Duration::milliseconds(10)),
}))
.await?;
client.disable_studio_mode().await?;
client.toggle_studio_mode().await?;
client.toggle_studio_mode().await?;
Ok(())
} |
|
mod.rs | use crate::query::Query;
use crate::schema::Field;
use crate::schema::Value;
use crate::tokenizer::{TextAnalyzer, Token};
use crate::Searcher;
use crate::{Document, Score};
use htmlescape::encode_minimal;
use std::cmp::Ordering;
use std::collections::BTreeMap;
use std::collections::BTreeSet;
const DEFAULT_MAX_NUM_CHARS: usize = 150;
#[derive(Debug)]
pub struct HighlightSection {
start: usize,
stop: usize,
}
impl HighlightSection {
fn new(start: usize, stop: usize) -> HighlightSection {
HighlightSection { start, stop }
}
/// Returns the bounds of the `HighlightSection`.
pub fn bounds(&self) -> (usize, usize) {
(self.start, self.stop)
}
}
#[derive(Debug)]
pub struct FragmentCandidate {
score: Score,
start_offset: usize,
stop_offset: usize,
num_chars: usize,
highlighted: Vec<HighlightSection>,
}
impl FragmentCandidate {
/// Create a basic `FragmentCandidate`.
///
/// `score` and `num_chars` are set to 0,
/// `highlighted` is set to an empty vec, and
/// `stop_offset` is set to `start_offset`, which is taken as a param.
fn new(start_offset: usize) -> FragmentCandidate {
FragmentCandidate {
score: 0.0,
start_offset,
stop_offset: start_offset,
num_chars: 0,
highlighted: vec![],
}
}
/// Updates the `score` and `highlighted` fields of the object.
///
/// Taking the token and terms, the token is added to the fragment.
/// If the token is one of the terms, the score
/// and highlighted fields are updated in the fragment.
fn try_add_token(&mut self, token: &Token, terms: &BTreeMap<String, Score>) {
self.stop_offset = token.offset_to;
if let Some(&score) = terms.get(&token.text.to_lowercase()) {
self.score += score;
self.highlighted
.push(HighlightSection::new(token.offset_from, token.offset_to));
}
}
}
/// `Snippet`
/// Contains a fragment of a document, and some highlighted parts inside it.
#[derive(Debug)]
pub struct Snippet {
fragments: String,
highlighted: Vec<HighlightSection>,
}
const HIGHLIGHTEN_PREFIX: &str = "<b>";
const HIGHLIGHTEN_POSTFIX: &str = "</b>";
impl Snippet {
/// Create a new, empty, `Snippet`
pub fn empty() -> Snippet {
Snippet {
fragments: String::new(),
highlighted: Vec::new(),
}
}
/// Returns a highlighted HTML string from the `Snippet`.
pub fn to_html(&self) -> String {
let mut html = String::new();
let mut start_from: usize = 0;
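// `start_from` tracks how much of `fragments` has been emitted; each
// highlight appends the escaped text before it, then the wrapped match.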
for item in self.highlighted.iter() {
html.push_str(&encode_minimal(&self.fragments[start_from..item.start]));
html.push_str(HIGHLIGHTEN_PREFIX);
html.push_str(&encode_minimal(&self.fragments[item.start..item.stop]));
html.push_str(HIGHLIGHTEN_POSTFIX);
start_from = item.stop;
}
html.push_str(&encode_minimal(
&self.fragments[start_from..self.fragments.len()],
));
html
}
/// Returns a fragment from the `Snippet`.
pub fn fragments(&self) -> &str {
&self.fragments
}
/// Returns a list of highlighted positions from the `Snippet`.
pub fn highlighted(&self) -> &[HighlightSection] {
&self.highlighted
}
}
/// Returns a non-empty list of "good" fragments.
///
/// If no target term is within the text, then the function
/// should return an empty Vec.
///
/// If a target term is within the text, then the returned
/// list is required to be non-empty.
///
/// The returned list is non-empty and contains fewer
/// than 12 possibly overlapping fragments.
///
/// All fragments should contain at least one target term
/// and have at most `max_num_chars` characters (not bytes).
///
/// It is ok to emit overlapping fragments, for instance,
/// one short and one long containing the same keyword, in order
/// to leave optimization opportunity to the fragment selector
/// upstream.
///
/// Fragments must be valid in the sense that `&text[fragment.start..fragment.stop]`
/// has to be a valid string.
fn search_fragments<'a>(
tokenizer: &TextAnalyzer,
text: &'a str,
terms: &BTreeMap<String, Score>,
max_num_chars: usize,
) -> Vec<FragmentCandidate> {
let mut token_stream = tokenizer.token_stream(text);
let mut fragment = FragmentCandidate::new(0);
let mut fragments: Vec<FragmentCandidate> = vec![];
while let Some(next) = token_stream.next() {
if (next.offset_to - fragment.start_offset) > max_num_chars {
if fragment.score > 0.0 {
fragments.push(fragment)
};
fragment = FragmentCandidate::new(next.offset_from);
}
fragment.try_add_token(next, &terms);
}
if fragment.score > 0.0 {
fragments.push(fragment)
}
fragments
}
/// Returns a Snippet
///
/// Takes a vector of `FragmentCandidate`s and the text.
/// Figures out the best fragment from it and creates a snippet.
fn select_best_fragment_combination(fragments: &[FragmentCandidate], text: &str) -> Snippet {
let best_fragment_opt = fragments.iter().max_by(|left, right| {
let cmp_score = left
.score
.partial_cmp(&right.score)
.unwrap_or(Ordering::Equal);
if cmp_score == Ordering::Equal {
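// Tie-break equal scores toward the earliest fragment: the reversed
// comparison makes `max_by` treat smaller offsets as greater.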
(right.start_offset, right.stop_offset).cmp(&(left.start_offset, left.stop_offset))
} else {
cmp_score
}
});
if let Some(fragment) = best_fragment_opt {
let fragment_text = &text[fragment.start_offset..fragment.stop_offset];
let highlighted = fragment
.highlighted
.iter()
.map(|item| {
HighlightSection::new(
item.start - fragment.start_offset,
item.stop - fragment.start_offset,
)
})
.collect();
Snippet {
fragments: fragment_text.to_string(),
highlighted,
}
} else {
// when there are no fragments to choose from,
// for now create an empty snippet
Snippet {
fragments: String::new(),
highlighted: vec![],
}
}
}
/// `SnippetGenerator`
///
/// # Example
///
/// ```rust
/// # use tantivy::query::QueryParser;
/// # use tantivy::schema::{Schema, TEXT};
/// # use tantivy::{doc, Index};
/// use tantivy::SnippetGenerator;
///
/// # fn main() -> tantivy::Result<()> {
/// # let mut schema_builder = Schema::builder();
/// # let text_field = schema_builder.add_text_field("text", TEXT);
/// # let schema = schema_builder.build();
/// # let index = Index::create_in_ram(schema);
/// # let mut index_writer = index.writer_with_num_threads(1, 10_000_000)?;
/// # let doc = doc!(text_field => r#"Comme je descendais des Fleuves impassibles,
/// # Je ne me sentis plus guidé par les haleurs :
/// # Des Peaux-Rouges criards les avaient pris pour cibles,
/// # Les ayant cloués nus aux poteaux de couleurs.
/// #
/// # J'étais insoucieux de tous les équipages,
/// # Porteur de blés flamands ou de cotons anglais.
/// # Quand avec mes haleurs ont fini ces tapages,
/// # Les Fleuves m'ont laissé descendre où je voulais.
/// # "#);
/// # index_writer.add_document(doc.clone());
/// # index_writer.commit()?;
/// # let query_parser = QueryParser::for_index(&index, vec![text_field]);
/// // ...
/// let query = query_parser.parse_query("haleurs flamands").unwrap();
/// # let reader = index.reader()?;
/// # let searcher = reader.searcher();
/// let mut snippet_generator = SnippetGenerator::create(&searcher, &*query, text_field)?;
/// snippet_generator.set_max_num_chars(100);
/// let snippet = snippet_generator.snippet_from_doc(&doc);
/// let snippet_html: String = snippet.to_html();
/// assert_eq!(snippet_html, "Comme je descendais des Fleuves impassibles,\n Je ne me sentis plus guidé par les <b>haleurs</b> :\n Des");
/// # Ok(())
/// # }
/// ```
pub struct SnippetGenerator {
terms_text: BTreeMap<String, Score>,
tokenizer: TextAnalyzer,
field: Field,
max_num_chars: usize,
}
impl SnippetGenerator {
/// Creates a new snippet generator
pub fn create(
searcher: &Searcher,
query: &dyn Query,
field: Field,
) -> crate::Result<SnippetGenerator> {
let mut terms = BTreeSet::new();
query.query_terms(&mut terms);
let mut terms_text: BTreeMap<String, Score> = Default::default();
for term in terms {
if term.field() != field {
continue;
}
let doc_freq = searcher.doc_freq(&term)?;
if doc_freq > 0 {
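// weight rarer terms higher: the score is inversely
// proportional to (1 + document frequency)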
let score = 1.0 / (1.0 + doc_freq as Score);
terms_text.insert(term.text().to_string(), score);
}
}
let tokenizer = searcher.index().tokenizer_for_field(field)?;
Ok(SnippetGenerator {
terms_text,
tokenizer,
field,
max_num_chars: DEFAULT_MAX_NUM_CHARS,
})
}
/// Sets a maximum number of chars.
pub fn set_max_num_chars(&mut self, max_num_chars: usize) {
self.max_num_chars = max_num_chars;
}
#[cfg(test)]
pub fn terms_text(&self) -> &BTreeMap<String, Score> {
&self.terms_text
}
/// Generates a snippet for the given `Document`.
///
/// This method extracts the text associated with the `SnippetGenerator`'s field
/// and computes a snippet.
pub fn snippet_from_doc(&self, doc: &Document) -> Snippet {
let text: String = doc
.get_all(self.field)
.flat_map(Value::text)
.collect::<Vec<&str>>()
.join(" ");
self.snippet(&text)
}
/// Generates a snippet for the given text.
pub fn snippet(&self, text: &str) -> Snippet {
let fragment_candidates =
search_fragments(&self.tokenizer, &text, &self.terms_text, self.max_num_chars);
select_best_fragment_combination(&fragment_candidates[..], &text)
}
}
#[cfg(test)]
mod tests {
use super::{search_fragments, select_best_fragment_combination};
use crate::query::QueryParser;
use crate::schema::{IndexRecordOption, Schema, TextFieldIndexing, TextOptions, TEXT};
use crate::tokenizer::SimpleTokenizer;
use crate::Index;
use crate::SnippetGenerator;
use maplit::btreemap;
use std::collections::BTreeMap;
use std::iter::Iterator;
const TEST_TEXT: &'static str = r#"Rust is a systems programming language sponsored by
Mozilla which describes it as a "safe, concurrent, practical language", supporting functional and
imperative-procedural paradigms. Rust is syntactically similar to C++[according to whom?],
but its designers intend it to provide better memory safety while still maintaining
performance.
Rust is free and open-source software, released under an MIT License, or Apache License
2.0. Its designers have refined the language through the experiences of writing the Servo
web browser layout engine[14] and the Rust compiler. A large proportion of current commits
to the project are from community members.[15]
Rust won first place for "most loved programming language" in the Stack Overflow Developer
Survey in 2016, 2017, and 2018."#;
#[test]
fn test_snippet() {
let terms = btreemap! {
String::from("rust") => 1.0,
String::from("language") => 0.9
};
let fragments = search_fragments(&From::from(SimpleTokenizer), TEST_TEXT, &terms, 100);
assert_eq!(fragments.len(), 7);
{
let first = &fragments[0];
assert_eq!(first.score, 1.9);
assert_eq!(first.stop_offset, 89);
}
let snippet = select_best_fragment_combination(&fragments[..], &TEST_TEXT);
assert_eq!(
snippet.fragments,
"Rust is a systems programming language sponsored by\n\
Mozilla which describes it as a \"safe"
);
assert_eq!(
snippet.to_html(),
"<b>Rust</b> is a systems programming <b>language</b> \
sponsored by\nMozilla which describes it as a "safe"
)
}
#[test]
fn test_snippet_scored_fragment() {
{
let terms = btreemap! {
String::from("rust") =>1.0,
String::from("language") => 0.9
};
let fragments = search_fragments(&From::from(SimpleTokenizer), TEST_TEXT, &terms, 20);
{
let first = &fragments[0];
assert_eq!(first.score, 1.0);
assert_eq!(first.stop_offset, 17);
}
let snippet = select_best_fragment_combination(&fragments[..], &TEST_TEXT);
assert_eq!(snippet.to_html(), "<b>Rust</b> is a systems")
}
{
let terms = btreemap! {
String::from("rust") =>0.9,
String::from("language") => 1.0
};
let fragments = search_fragments(&From::from(SimpleTokenizer), TEST_TEXT, &terms, 20);
//assert_eq!(fragments.len(), 7);
{
let first = &fragments[0];
assert_eq!(first.score, 0.9);
assert_eq!(first.stop_offset, 17);
}
let snippet = select_best_fragment_combination(&fragments[..], &TEST_TEXT);
assert_eq!(snippet.to_html(), "programming <b>language</b>")
}
}
#[test]
fn test_snippet_in_second_fragment() {
let text = "a b c d e f g";
let mut terms = BTreeMap::new();
terms.insert(String::from("c"), 1.0);
let fragments = search_fragments(&From::from(SimpleTokenizer), &text, &terms, 3);
assert_eq!(fragments.len(), 1);
{
let first = fragments.iter().nth(0).unwrap();
assert_eq!(first.score, 1.0);
assert_eq!(first.start_offset, 4);
assert_eq!(first.stop_offset, 7);
}
let snippet = select_best_fragment_combination(&fragments[..], &text);
assert_eq!(snippet.fragments, "c d");
assert_eq!(snippet.to_html(), "<b>c</b> d");
}
#[test]
fn test_snippet_with_term_at_the_end_of_fragment() {
let text = "a b c d e f f g";
let mut terms = BTreeMap::new();
terms.insert(String::from("f"), 1.0);
let fragments = search_fragments(&From::from(SimpleTokenizer), &text, &terms, 3);
assert_eq!(fragments.len(), 2);
{
let first = fragments.iter().nth(0).unwrap();
assert_eq!(first.score, 1.0);
assert_eq!(first.stop_offset, 11);
assert_eq!(first.start_offset, 8);
}
let snippet = select_best_fragment_combination(&fragments[..], &text);
assert_eq!(snippet.fragments, "e f");
assert_eq!(snippet.to_html(), "e <b>f</b>");
}
#[test]
fn test_snippet_with_second_fragment_has_the_highest_score() {
let text = "a b c d e f g";
let mut terms = BTreeMap::new();
terms.insert(String::from("f"), 1.0);
terms.insert(String::from("a"), 0.9);
let fragments = search_fragments(&From::from(SimpleTokenizer), &text, &terms, 7);
assert_eq!(fragments.len(), 2);
{
let first = fragments.iter().nth(0).unwrap();
assert_eq!(first.score, 0.9);
assert_eq!(first.stop_offset, 7);
assert_eq!(first.start_offset, 0);
}
let snippet = select_best_fragment_combination(&fragments[..], &text);
assert_eq!(snippet.fragments, "e f g"); |
#[test]
fn test_snippet_with_term_not_in_text() {
let text = "a b c d";
let mut terms = BTreeMap::new();
terms.insert(String::from("z"), 1.0);
let fragments = search_fragments(&From::from(SimpleTokenizer), &text, &terms, 3);
assert_eq!(fragments.len(), 0);
let snippet = select_best_fragment_combination(&fragments[..], &text);
assert_eq!(snippet.fragments, "");
assert_eq!(snippet.to_html(), "");
}
#[test]
fn test_snippet_with_no_terms() {
let text = "a b c d";
let terms = BTreeMap::new();
let fragments = search_fragments(&From::from(SimpleTokenizer), &text, &terms, 3);
assert_eq!(fragments.len(), 0);
let snippet = select_best_fragment_combination(&fragments[..], &text);
assert_eq!(snippet.fragments, "");
assert_eq!(snippet.to_html(), "");
}
#[test]
fn test_snippet_generator_term_score() {
let mut schema_builder = Schema::builder();
let text_field = schema_builder.add_text_field("text", TEXT);
let schema = schema_builder.build();
let index = Index::create_in_ram(schema);
{
// writing the segment
let mut index_writer = index.writer_for_tests().unwrap();
index_writer.add_document(doc!(text_field => "a"));
index_writer.add_document(doc!(text_field => "a"));
index_writer.add_document(doc!(text_field => "a b"));
index_writer.commit().unwrap();
}
let searcher = index.reader().unwrap().searcher();
let query_parser = QueryParser::for_index(&index, vec![text_field]);
{
let query = query_parser.parse_query("e").unwrap();
let snippet_generator =
SnippetGenerator::create(&searcher, &*query, text_field).unwrap();
assert!(snippet_generator.terms_text().is_empty());
}
{
let query = query_parser.parse_query("a").unwrap();
let snippet_generator =
SnippetGenerator::create(&searcher, &*query, text_field).unwrap();
assert_eq!(
&btreemap!("a".to_string() => 0.25),
snippet_generator.terms_text()
);
}
{
let query = query_parser.parse_query("a b").unwrap();
let snippet_generator =
SnippetGenerator::create(&searcher, &*query, text_field).unwrap();
assert_eq!(
&btreemap!("a".to_string() => 0.25, "b".to_string() => 0.5),
snippet_generator.terms_text()
);
}
{
let query = query_parser.parse_query("a b c").unwrap();
let snippet_generator =
SnippetGenerator::create(&searcher, &*query, text_field).unwrap();
assert_eq!(
&btreemap!("a".to_string() => 0.25, "b".to_string() => 0.5),
snippet_generator.terms_text()
);
}
}
#[test]
fn test_snippet_generator() {
let mut schema_builder = Schema::builder();
let text_options = TextOptions::default().set_indexing_options(
TextFieldIndexing::default()
.set_tokenizer("en_stem")
.set_index_option(IndexRecordOption::Basic),
);
let text_field = schema_builder.add_text_field("text", text_options);
let schema = schema_builder.build();
let index = Index::create_in_ram(schema);
{
// writing the segment
let mut index_writer = index.writer_for_tests().unwrap();
{
let doc = doc!(text_field => TEST_TEXT);
index_writer.add_document(doc);
}
index_writer.commit().unwrap();
}
let searcher = index.reader().unwrap().searcher();
let query_parser = QueryParser::for_index(&index, vec![text_field]);
let query = query_parser.parse_query("rust design").unwrap();
let mut snippet_generator =
SnippetGenerator::create(&searcher, &*query, text_field).unwrap();
{
let snippet = snippet_generator.snippet(TEST_TEXT);
assert_eq!(snippet.to_html(), "imperative-procedural paradigms. <b>Rust</b> is syntactically similar to C++[according to whom?],\nbut its <b>designers</b> intend it to provide better memory safety");
}
{
snippet_generator.set_max_num_chars(90);
let snippet = snippet_generator.snippet(TEST_TEXT);
assert_eq!(snippet.to_html(), "<b>Rust</b> is syntactically similar to C++[according to whom?],\nbut its <b>designers</b> intend it to");
}
}
} | assert_eq!(snippet.to_html(), "e <b>f</b> g");
} |
transcription_folder_to_sclite_hyp.py | import fileinput
import os
def to_sclite_line(trans):
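# sclite's trn format is one hypothesis per line, followed by the
# utterance id in parentheses, e.g. "hello world (utt-001)"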
with open(trans, "r") as fd:
hyp = fd.read()
_id, _ = os.path.splitext(os.path.basename(trans))
return f"{hyp} ({_id})"
| for ln in finput:
print(to_sclite_line(ln.strip()))
if __name__ == "__main__":
main() | def main():
with fileinput.input() as finput: |
test_recognize_pii_entities_async.py | # coding=utf-8
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------
import os
import pytest
import platform
import functools
from azure.core.exceptions import HttpResponseError, ClientAuthenticationError
from azure.core.credentials import AzureKeyCredential
from asynctestcase import AsyncTextAnalyticsTest
from testcase import GlobalTextAnalyticsAccountPreparer
from testcase import TextAnalyticsClientPreparer as _TextAnalyticsClientPreparer
from azure.ai.textanalytics.aio import TextAnalyticsClient
from azure.ai.textanalytics import (
TextDocumentInput,
VERSION,
TextAnalyticsApiVersion,
PiiEntityDomainType,
)
# pre-apply the client_cls positional argument so it needn't be explicitly passed below
TextAnalyticsClientPreparer = functools.partial(_TextAnalyticsClientPreparer, TextAnalyticsClient)
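# For illustration only (not part of the original test module): functools.partial
# binds leading arguments, e.g. functools.partial(int, base=2)("101") == 5, so the
# preparer above always receives TextAnalyticsClient as its client_cls.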
class TestRecognizePIIEntities(AsyncTextAnalyticsTest):
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_no_single_input(self, client):
with self.assertRaises(TypeError):
response = await client.recognize_pii_entities("hello world")
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_all_successful_passing_dict(self, client):
docs = [{"id": "1", "text": "My SSN is 859-98-0987."},
{"id": "2", "text": "Your ABA number - 111000025 - is the first 9 digits in the lower left hand corner of your personal check."},
{"id": "3", "text": "Is 998.214.865-68 your Brazilian CPF number?"}]
response = await client.recognize_pii_entities(docs, show_stats=True)
self.assertEqual(response[0].entities[0].text, "859-98-0987")
self.assertEqual(response[0].entities[0].category, "U.S. Social Security Number (SSN)")
self.assertEqual(response[1].entities[0].text, "111000025")
# self.assertEqual(response[1].entities[0].category, "ABA Routing Number") # Service is currently returning PhoneNumber here
self.assertEqual(response[2].entities[0].text, "998.214.865-68")
self.assertEqual(response[2].entities[0].category, "Brazil CPF Number")
for doc in response:
self.assertIsNotNone(doc.id)
self.assertIsNotNone(doc.statistics)
for entity in doc.entities:
self.assertIsNotNone(entity.text)
self.assertIsNotNone(entity.category)
self.assertIsNotNone(entity.offset)
self.assertIsNotNone(entity.confidence_score)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_all_successful_passing_text_document_input(self, client):
docs = [
TextDocumentInput(id="1", text="My SSN is 859-98-0987."),
TextDocumentInput(id="2", text="Your ABA number - 111000025 - is the first 9 digits in the lower left hand corner of your personal check."),
TextDocumentInput(id="3", text="Is 998.214.865-68 your Brazilian CPF number?")
]
response = await client.recognize_pii_entities(docs, show_stats=True)
self.assertEqual(response[0].entities[0].text, "859-98-0987")
self.assertEqual(response[0].entities[0].category, "U.S. Social Security Number (SSN)")
self.assertEqual(response[1].entities[0].text, "111000025")
# self.assertEqual(response[1].entities[0].category, "ABA Routing Number") # Service is currently returning PhoneNumber here
self.assertEqual(response[2].entities[0].text, "998.214.865-68")
self.assertEqual(response[2].entities[0].category, "Brazil CPF Number")
for doc in response:
self.assertIsNotNone(doc.id)
self.assertIsNotNone(doc.statistics)
for entity in doc.entities:
self.assertIsNotNone(entity.text)
self.assertIsNotNone(entity.category)
self.assertIsNotNone(entity.offset)
self.assertIsNotNone(entity.confidence_score)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_passing_only_string(self, client):
docs = [
u"My SSN is 859-98-0987.",
u"Your ABA number - 111000025 - is the first 9 digits in the lower left hand corner of your personal check.",
u"Is 998.214.865-68 your Brazilian CPF number?",
u""
]
response = await client.recognize_pii_entities(docs, show_stats=True)
self.assertEqual(response[0].entities[0].text, "859-98-0987")
self.assertEqual(response[0].entities[0].category, "U.S. Social Security Number (SSN)")
self.assertEqual(response[1].entities[0].text, "111000025")
# self.assertEqual(response[1].entities[0].category, "ABA Routing Number") # Service is currently returning PhoneNumber here
self.assertEqual(response[2].entities[0].text, "998.214.865-68")
self.assertEqual(response[2].entities[0].category, "Brazil CPF Number")
self.assertTrue(response[3].is_error)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_input_with_some_errors(self, client):
docs = [{"id": "1", "language": "es", "text": "hola"},
{"id": "2", "text": ""},
{"id": "3", "text": "Is 998.214.865-68 your Brazilian CPF number?"}]
response = await client.recognize_pii_entities(docs)
self.assertTrue(response[0].is_error)
self.assertTrue(response[1].is_error)
self.assertFalse(response[2].is_error)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_input_with_all_errors(self, client):
        docs = [{"id": "1", "text": ""},
                {"id": "2", "language": "Spanish", "text": "Hola"},
                {"id": "3", "language": "de", "text": ""}]
        response = await client.recognize_pii_entities(docs)
        self.assertTrue(response[0].is_error)
        self.assertTrue(response[1].is_error)
        self.assertTrue(response[2].is_error)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_too_many_documents(self, client):
docs = ["One", "Two", "Three", "Four", "Five", "Six"]
with pytest.raises(HttpResponseError) as excinfo:
await client.recognize_pii_entities(docs)
assert excinfo.value.status_code == 400
assert excinfo.value.error.code == "InvalidDocumentBatch"
assert "Batch request contains too many records" in str(excinfo.value)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_output_same_order_as_input(self, client):
docs = [
TextDocumentInput(id="1", text="one"),
TextDocumentInput(id="2", text="two"),
TextDocumentInput(id="3", text="three"),
TextDocumentInput(id="4", text="four"),
TextDocumentInput(id="5", text="five")
]
response = await client.recognize_pii_entities(docs)
for idx, doc in enumerate(response):
self.assertEqual(str(idx + 1), doc.id)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer(client_kwargs={"text_analytics_account_key": ""})
async def test_empty_credential_class(self, client):
with self.assertRaises(ClientAuthenticationError):
response = await client.recognize_pii_entities(
["This is written in English."]
)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer(client_kwargs={"text_analytics_account_key": "xxxxxxxxxxxx"})
async def test_bad_credentials(self, client):
with self.assertRaises(ClientAuthenticationError):
response = await client.recognize_pii_entities(
["This is written in English."]
)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_bad_document_input(self, client):
docs = "This is the wrong type"
with self.assertRaises(TypeError):
response = await client.recognize_pii_entities(docs)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_mixing_inputs(self, client):
docs = [
{"id": "1", "text": "Microsoft was founded by Bill Gates and Paul Allen."},
TextDocumentInput(id="2", text="I did not like the hotel we stayed at. It was too expensive."),
u"You cannot mix string input with the above inputs"
]
with self.assertRaises(TypeError):
response = await client.recognize_pii_entities(docs)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_out_of_order_ids(self, client):
docs = [{"id": "56", "text": ":)"},
{"id": "0", "text": ":("},
{"id": "22", "text": ""},
{"id": "19", "text": ":P"},
{"id": "1", "text": ":D"}]
response = await client.recognize_pii_entities(docs)
in_order = ["56", "0", "22", "19", "1"]
for idx, resp in enumerate(response):
self.assertEqual(resp.id, in_order[idx])
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_show_stats_and_model_version(self, client):
def callback(response):
self.assertIsNotNone(response)
self.assertIsNotNone(response.model_version, msg=response.raw_response)
self.assertIsNotNone(response.raw_response)
self.assertEqual(response.statistics.document_count, 5)
self.assertEqual(response.statistics.transaction_count, 4)
self.assertEqual(response.statistics.valid_document_count, 4)
self.assertEqual(response.statistics.erroneous_document_count, 1)
docs = [{"id": "56", "text": ":)"},
{"id": "0", "text": ":("},
{"id": "22", "text": ""},
{"id": "19", "text": ":P"},
{"id": "1", "text": ":D"}]
response = await client.recognize_pii_entities(
docs,
show_stats=True,
model_version="latest",
raw_response_hook=callback
)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_batch_size_over_limit(self, client):
docs = [u"hello world"] * 1050
with self.assertRaises(HttpResponseError):
response = await client.recognize_pii_entities(docs)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_whole_batch_language_hint(self, client):
def callback(resp):
language_str = "\"language\": \"fr\""
language = resp.http_request.body.count(language_str)
self.assertEqual(language, 3)
docs = [
u"This was the best day of my life.",
u"I did not like the hotel we stayed at. It was too expensive.",
u"The restaurant was not as good as I hoped."
]
response = await client.recognize_pii_entities(docs, language="fr", raw_response_hook=callback)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_whole_batch_dont_use_language_hint(self, client):
def callback(resp):
language_str = "\"language\": \"\""
language = resp.http_request.body.count(language_str)
self.assertEqual(language, 3)
docs = [
u"This was the best day of my life.",
u"I did not like the hotel we stayed at. It was too expensive.",
u"The restaurant was not as good as I hoped."
]
response = await client.recognize_pii_entities(docs, language="", raw_response_hook=callback)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_per_item_dont_use_language_hint(self, client):
def callback(resp):
language_str = "\"language\": \"\""
language = resp.http_request.body.count(language_str)
self.assertEqual(language, 2)
language_str = "\"language\": \"en\""
language = resp.http_request.body.count(language_str)
self.assertEqual(language, 1)
docs = [{"id": "1", "language": "", "text": "I will go to the park."},
{"id": "2", "language": "", "text": "I did not like the hotel we stayed at."},
{"id": "3", "text": "The restaurant had really good food."}]
response = await client.recognize_pii_entities(docs, raw_response_hook=callback)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_whole_batch_language_hint_and_obj_input(self, client):
def callback(resp):
language_str = "\"language\": \"de\""
language = resp.http_request.body.count(language_str)
self.assertEqual(language, 3)
docs = [
TextDocumentInput(id="1", text="I should take my cat to the veterinarian."),
TextDocumentInput(id="4", text="Este es un document escrito en Español."),
TextDocumentInput(id="3", text="猫は幸せ"),
]
response = await client.recognize_pii_entities(docs, language="de", raw_response_hook=callback)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_whole_batch_language_hint_and_obj_per_item_hints(self, client):
def callback(resp):
language_str = "\"language\": \"es\""
language = resp.http_request.body.count(language_str)
self.assertEqual(language, 2)
language_str = "\"language\": \"en\""
language = resp.http_request.body.count(language_str)
self.assertEqual(language, 1)
docs = [
TextDocumentInput(id="1", text="I should take my cat to the veterinarian.", language="es"),
TextDocumentInput(id="2", text="Este es un document escrito en Español.", language="es"),
TextDocumentInput(id="3", text="猫は幸せ"),
]
response = await client.recognize_pii_entities(docs, language="en", raw_response_hook=callback)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_whole_batch_language_hint_and_dict_per_item_hints(self, client):
def callback(resp):
language_str = "\"language\": \"es\""
language = resp.http_request.body.count(language_str)
self.assertEqual(language, 2)
language_str = "\"language\": \"en\""
language = resp.http_request.body.count(language_str)
self.assertEqual(language, 1)
docs = [{"id": "1", "language": "es", "text": "I will go to the park."},
{"id": "2", "language": "es", "text": "I did not like the hotel we stayed at."},
{"id": "3", "text": "The restaurant had really good food."}]
response = await client.recognize_pii_entities(docs, language="en", raw_response_hook=callback)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer(client_kwargs={"default_language": "es"})
async def test_client_passed_default_language_hint(self, client):
def callback(resp):
language_str = "\"language\": \"es\""
language = resp.http_request.body.count(language_str)
self.assertEqual(language, 3)
def callback_2(resp):
language_str = "\"language\": \"en\""
language = resp.http_request.body.count(language_str)
self.assertEqual(language, 3)
docs = [{"id": "1", "text": "I will go to the park."},
{"id": "2", "text": "I did not like the hotel we stayed at."},
{"id": "3", "text": "The restaurant had really good food."}]
response = await client.recognize_pii_entities(docs, raw_response_hook=callback)
response = await client.recognize_pii_entities(docs, language="en", raw_response_hook=callback_2)
response = await client.recognize_pii_entities(docs, raw_response_hook=callback)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_invalid_language_hint_method(self, client):
response = await client.recognize_pii_entities(
["This should fail because we're passing in an invalid language hint"], language="notalanguage"
)
self.assertEqual(response[0].error.code, 'UnsupportedLanguageCode')
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_invalid_language_hint_docs(self, client):
response = await client.recognize_pii_entities(
[{"id": "1", "language": "notalanguage", "text": "This should fail because we're passing in an invalid language hint"}]
)
self.assertEqual(response[0].error.code, 'UnsupportedLanguageCode')
@GlobalTextAnalyticsAccountPreparer()
async def test_rotate_subscription_key(self, resource_group, location, text_analytics_account, text_analytics_account_key):
credential = AzureKeyCredential(text_analytics_account_key)
client = TextAnalyticsClient(text_analytics_account, credential)
docs = [{"id": "1", "text": "I will go to the park."},
{"id": "2", "text": "I did not like the hotel we stayed at."},
{"id": "3", "text": "The restaurant had really good food."}]
response = await client.recognize_pii_entities(docs)
self.assertIsNotNone(response)
credential.update("xxx") # Make authentication fail
with self.assertRaises(ClientAuthenticationError):
response = await client.recognize_pii_entities(docs)
credential.update(text_analytics_account_key) # Authenticate successfully again
response = await client.recognize_pii_entities(docs)
self.assertIsNotNone(response)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_user_agent(self, client):
def callback(resp):
self.assertIn("azsdk-python-ai-textanalytics/{} Python/{} ({})".format(
VERSION, platform.python_version(), platform.platform()),
resp.http_request.headers["User-Agent"]
)
docs = [{"id": "1", "text": "I will go to the park."},
{"id": "2", "text": "I did not like the hotel we stayed at."},
{"id": "3", "text": "The restaurant had really good food."}]
response = await client.recognize_pii_entities(docs, raw_response_hook=callback)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_document_attribute_error_no_result_attribute(self, client):
docs = [{"id": "1", "text": ""}]
response = await client.recognize_pii_entities(docs)
# Attributes on DocumentError
self.assertTrue(response[0].is_error)
self.assertEqual(response[0].id, "1")
self.assertIsNotNone(response[0].error)
# Result attribute not on DocumentError, custom error message
try:
entities = response[0].entities
except AttributeError as custom_error:
self.assertEqual(
custom_error.args[0],
'\'DocumentError\' object has no attribute \'entities\'. '
'The service was unable to process this document:\nDocument Id: 1\nError: '
'InvalidDocument - Document text is empty.\n'
)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_document_attribute_error_nonexistent_attribute(self, client):
docs = [{"id": "1", "text": ""}]
response = await client.recognize_pii_entities(docs)
# Attribute not found on DocumentError or result obj, default behavior/message
try:
entities = response[0].attribute_not_on_result_or_error
except AttributeError as default_behavior:
self.assertEqual(
default_behavior.args[0],
'\'DocumentError\' object has no attribute \'attribute_not_on_result_or_error\''
)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_bad_model_version_error(self, client):
docs = [{"id": "1", "language": "english", "text": "I did not like the hotel we stayed at."}]
try:
result = await client.recognize_pii_entities(docs, model_version="bad")
except HttpResponseError as err:
self.assertEqual(err.error.code, "ModelVersionIncorrect")
self.assertIsNotNone(err.error.message)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_document_errors(self, client):
text = ""
for _ in range(5121):
text += "x"
docs = [{"id": "1", "text": ""},
{"id": "2", "language": "english", "text": "I did not like the hotel we stayed at."},
{"id": "3", "text": text}]
doc_errors = await client.recognize_pii_entities(docs)
self.assertEqual(doc_errors[0].error.code, "InvalidDocument")
self.assertIsNotNone(doc_errors[0].error.message)
self.assertEqual(doc_errors[1].error.code, "UnsupportedLanguageCode")
self.assertIsNotNone(doc_errors[1].error.message)
self.assertEqual(doc_errors[2].error.code, "InvalidDocument")
self.assertIsNotNone(doc_errors[2].error.message)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_document_warnings(self, client):
# No warnings actually returned for recognize_pii_entities. Will update when they add
docs = [
{"id": "1", "text": "This won't actually create a warning :'("},
]
result = await client.recognize_pii_entities(docs)
for doc in result:
doc_warnings = doc.warnings
self.assertEqual(len(doc_warnings), 0)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_not_passing_list_for_docs(self, client):
docs = {"id": "1", "text": "hello world"}
with pytest.raises(TypeError) as excinfo:
await client.recognize_pii_entities(docs)
assert "Input documents cannot be a dict" in str(excinfo.value)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_missing_input_records_error(self, client):
docs = []
with pytest.raises(ValueError) as excinfo:
await client.recognize_pii_entities(docs)
assert "Input documents can not be empty or None" in str(excinfo.value)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_passing_none_docs(self, client):
with pytest.raises(ValueError) as excinfo:
await client.recognize_pii_entities(None)
assert "Input documents can not be empty or None" in str(excinfo.value)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_duplicate_ids_error(self, client):
# Duplicate Ids
docs = [{"id": "1", "text": "hello world"},
{"id": "1", "text": "I did not like the hotel we stayed at."}]
try:
result = await client.recognize_pii_entities(docs)
except HttpResponseError as err:
self.assertEqual(err.error.code, "InvalidDocument")
self.assertIsNotNone(err.error.message)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_batch_size_over_limit_error(self, client):
# Batch size over limit
docs = [u"hello world"] * 1001
try:
response = await client.recognize_pii_entities(docs)
except HttpResponseError as err:
self.assertEqual(err.error.code, "InvalidDocumentBatch")
self.assertIsNotNone(err.error.message)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_pass_cls(self, client):
def callback(pipeline_response, deserialized, _):
return "cls result"
res = await client.recognize_pii_entities(
documents=["Test passing cls to endpoint"],
cls=callback
)
assert res == "cls result"
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_language_kwarg_english(self, client):
def callback(response):
language_str = "\"language\": \"en\""
self.assertEqual(response.http_request.body.count(language_str), 1)
self.assertIsNotNone(response.model_version)
self.assertIsNotNone(response.statistics)
res = await client.recognize_pii_entities(
documents=["Bill Gates is the CEO of Microsoft."],
model_version="latest",
show_stats=True,
language="en",
raw_response_hook=callback
)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer(client_kwargs={"api_version": TextAnalyticsApiVersion.V3_0})
async def test_recognize_pii_entities_v3(self, client):
with pytest.raises(ValueError) as excinfo:
await client.recognize_pii_entities(["this should fail"])
assert "'recognize_pii_entities' endpoint is only available for API version v3.1-preview and up" in str(excinfo.value)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_redacted_text(self, client):
result = await client.recognize_pii_entities(["My SSN is 859-98-0987."])
self.assertEqual("My SSN is ***********.", result[0].redacted_text)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_phi_domain_filter(self, client):
# without the domain filter, this should return two entities: Microsoft as an org,
# and the phone number. With the domain filter, it should only return one.
result = await client.recognize_pii_entities(
["I work at Microsoft and my phone number is 333-333-3333"],
domain_filter=PiiEntityDomainType.PROTECTED_HEALTH_INFORMATION
)
self.assertEqual(len(result[0].entities), 1)
self.assertEqual(result[0].entities[0].text, '333-333-3333')
self.assertEqual(result[0].entities[0].category, 'Phone Number')
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer(client_kwargs={"api_version": TextAnalyticsApiVersion.V3_0})
async def test_string_index_type_explicit_fails_v3(self, client):
with pytest.raises(ValueError) as excinfo:
await client.recognize_pii_entities(["this should fail"], string_index_type="UnicodeCodePoint")
assert "'string_index_type' is only available for API version v3.1-preview and up" in str(excinfo.value)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_default_string_index_type_is_UnicodeCodePoint(self, client):
def callback(response):
self.assertEqual(response.http_request.query["stringIndexType"], "UnicodeCodePoint")
res = await client.recognize_pii_entities(
documents=["Hello world"],
raw_response_hook=callback
)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_explicit_set_string_index_type(self, client):
def callback(response):
self.assertEqual(response.http_request.query["stringIndexType"], "TextElements_v8")
res = await client.recognize_pii_entities(
documents=["Hello world"],
string_index_type="TextElements_v8",
raw_response_hook=callback
        )
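# Hedged usage sketch outside the test preparers; "<endpoint>" and "<key>" are
# placeholders, not real values. It only uses names imported at the top of this
# module and attributes exercised by the tests above:
async def _sample_recognize_pii():
    client = TextAnalyticsClient("<endpoint>", AzureKeyCredential("<key>"))
    result = await client.recognize_pii_entities(["My SSN is 859-98-0987."])
    for doc in result:
        if doc.is_error:
            continue
        print(doc.redacted_text)
        for entity in doc.entities:
            print(entity.text, entity.category, entity.confidence_score)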
conftest.py | import pytest
import pathlib
import os
import subprocess
import tempfile
from kopf.testing import KopfRunner
from dask_kubernetes.common.utils import check_dependency
DIR = pathlib.Path(__file__).parent.absolute()
check_dependency("helm")
check_dependency("kubectl")
check_dependency("docker")
@pytest.fixture()
async def kopf_runner(k8s_cluster):
yield KopfRunner(["run", "-m", "dask_kubernetes.operator", "--verbose"])
@pytest.fixture(scope="session")
def docker_image():
image_name = "dask-kubernetes:dev"
subprocess.check_output(["docker", "build", "-t", image_name, "./ci/"])
return image_name
@pytest.fixture(scope="session")
def k8s_cluster(kind_cluster, docker_image):
os.environ["KUBECONFIG"] = str(kind_cluster.kubeconfig_path)
kind_cluster.load_docker_image(docker_image)
yield kind_cluster
del os.environ["KUBECONFIG"]
@pytest.fixture(scope="session")
def ns(k8s_cluster):
return "default"
def run_generate(crd_path, patch_path, temp_path):
subprocess.run(
["k8s-crd-resolver", "-r", "-j", patch_path, crd_path, temp_path],
check=True,
env={**os.environ},
)
@pytest.fixture(scope="session", autouse=True)
def customresources(k8s_cluster):
    temp_dir = tempfile.TemporaryDirectory()
    crd_path = os.path.join(DIR, "operator", "customresources")
    run_generate(
        os.path.join(crd_path, "daskcluster.yaml"),
        os.path.join(crd_path, "daskcluster.patch.yaml"),
        os.path.join(temp_dir.name, "daskcluster.yaml"),
    )
    run_generate(
        os.path.join(crd_path, "daskworkergroup.yaml"),
        os.path.join(crd_path, "daskworkergroup.patch.yaml"),
        os.path.join(temp_dir.name, "daskworkergroup.yaml"),
    )
    k8s_cluster.kubectl("apply", "-f", temp_dir.name)
    yield
    k8s_cluster.kubectl("delete", "-f", temp_dir.name)
    temp_dir.cleanup()
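# Hedged example of how these fixtures compose in a test. That KindCluster.kubectl
# returns the command's output is an assumption; the CRD names are illustrative:
def test_custom_resources_installed(k8s_cluster):
    # customresources is session-scoped and autouse, so the CRDs have already
    # been applied by the time any test runs.
    crds = k8s_cluster.kubectl("get", "crd")
    assert "daskclusters" in crds
    assert "daskworkergroups" in crds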
snake-case.js | var sentence = require('sentence-case');
/**
* Snake case a string.
*
* @param {String} string
* @return {String}
*/
module.exports = function (string) {
  return sentence(string).replace(/ |\./g, '_');
};
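// Hedged usage sketch (assuming the package is required by its published name):
//
//   var snakeCase = require('snake-case');
//   snakeCase('Foo Bar');  //=> 'foo_bar'
//   snakeCase('foo.bar');  //=> 'foo_bar'
//
// sentence-case lower-cases and space-separates the words first; the regex then
// rewrites the remaining spaces (and any literal dots) as underscores.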
delete-service-category-command.handler.ts | import { CommandHandler } from '@nestjs/cqrs';
import { DeleteServiceCategoryCommand } from '../impl/delete-service-category.command';
import { ModuleRef } from '@nestjs/core';
import { DeleteCommandHandler } from 'src/shared/modules/app-cqrs/commands/handler/delete-command.handler';
import { ServiceCategoryEntity } from '../../../entities/service-category.entity';
import { ServiceCategoryEntityService } from '../../../services/service-category-entity.service';
@CommandHandler(DeleteServiceCategoryCommand)
export class DeleteServiceCategoryCommandHandler extends DeleteCommandHandler<ServiceCategoryEntity> {
    constructor(
        readonly _moduleRef: ModuleRef
    ) {
        super(_moduleRef, ServiceCategoryEntityService.name);
    }
}
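// Hedged usage sketch: with @nestjs/cqrs, a handler registered via
// @CommandHandler is dispatched through the CommandBus, e.g.:
//
//   await this.commandBus.execute(new DeleteServiceCategoryCommand(/* ...args */));
//
// The DeleteCommandHandler base class and its (ModuleRef, entity-service-name)
// constructor contract are project-specific, not part of @nestjs/cqrs itself.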
|
assertion-add.view.ts | import { Component, OnInit } from '@angular/core';
@Component({
selector: 'cvc-assertion-add',
templateUrl: './assertion-add.view.html',
styleUrls: ['./assertion-add.view.less']
})
export class AssertionAddView implements OnInit {
  constructor() { }
  ngOnInit(): void {
  }
}
span.go | package tracing
import (
"context"
"errors"
"fmt"
"strconv"
"time"
sentry "github.com/getsentry/sentry-go"
opentracing "github.com/opentracing/opentracing-go"
otlog "github.com/opentracing/opentracing-go/log"
"github.com/reddit/baseplate.go/log"
)
var (
_ opentracing.SpanContext = (*Span)(nil)
_ opentracing.Span = (*Span)(nil)
)
// SpanType enum.
type SpanType int
// SpanType values.
const (
SpanTypeLocal SpanType = iota
SpanTypeClient
SpanTypeServer
)
const (
client = "client"
local = "local"
server = "server"
unknown = "unknown"
)
func (st SpanType) String() string {
switch st {
default:
return unknown
case SpanTypeServer:
return server
case SpanTypeClient:
return client
case SpanTypeLocal:
return local
}
}
type contextKey int
const (
serverSpanKey contextKey = iota
activeSpanKey
)
// AsSpan converts an opentracing.Span back to *Span.
//
// This function never returns nil.
// If the passed in opentracing.Span is actually not implemented by *Span,
// a new *Span with empty name and local type will be created and returned.
// When that happens it will also be logged if the last InitGlobalTracer call
// was with a non-nil logger.
//
// This function is provided for convenience calling functions not in
// opentracing Span API, for example:
//
// span := opentracing.StartSpan(name, opts...)
// tracing.AsSpan(span).AddHooks(hooks...)
func AsSpan(s opentracing.Span) *Span {
if span, ok := s.(*Span); ok && span != nil {
return span
}
globalTracer.logger.Log(fmt.Sprintf(
"Failed to cast opentracing.Span %#v back to *tracing.Span.",
s,
))
return newSpan(nil, "", SpanTypeLocal)
}
func newSpan(tracer *Tracer, name string, spanType SpanType) *Span {
span := &Span{
trace: newTrace(tracer, name),
spanType: spanType,
}
switch spanType {
case SpanTypeServer:
span.trace.timeAnnotationReceiveKey = ZipkinTimeAnnotationKeyServerReceive
span.trace.timeAnnotationSendKey = ZipkinTimeAnnotationKeyServerSend
case SpanTypeClient:
span.trace.timeAnnotationReceiveKey = ZipkinTimeAnnotationKeyClientReceive
span.trace.timeAnnotationSendKey = ZipkinTimeAnnotationKeyClientSend
}
return span
}
// Span defines a tracing span.
type Span struct {
trace *trace
component string
hooks []interface{}
spanType SpanType
hub *sentry.Hub
}
func (s *Span) onStart() {
for _, h := range s.hooks {
if hook, ok := h.(StartStopSpanHook); ok {
if err := hook.OnPostStart(s); err != nil {
s.logError("OnPostStart hook error: ", err)
}
}
}
}
// ID returns the ID for the Span.
func (s Span) ID() uint64 {
return s.trace.spanID
}
// Name returns the name of the Span.
func (s Span) Name() string {
return s.trace.name
}
// SpanType returns the SpanType for the Span.
func (s Span) SpanType() SpanType {
return s.spanType
}
// TraceID returns the ID for the Trace that this span is a part of.
func (s Span) TraceID() uint64 {
return s.trace.traceID
}
// ParentID returns the ID for the parent span of the current span.
func (s Span) ParentID() uint64 {
return s.trace.parentID
}
// Flags returns the flags set on the current span.
func (s Span) Flags() int64 {
return s.trace.flags
}
// Sampled returns if the current span is sampled.
func (s Span) Sampled() bool {
return s.trace.sampled
}
// StartTime returns the time that the span was started.
func (s Span) StartTime() time.Time {
return s.trace.start
}
// StopTime returns the time that the span was stopped; it will be zero if the
// span has not been stopped yet.
func (s Span) StopTime() time.Time {
return s.trace.stop
}
// logError is a helper method to log an error plus a message.
//
// This uses the logger provided by the underlying tracing.Tracer used to
// publish the Span.
func (s Span) logError(msg string, err error) {
s.trace.tracer.logger.Log(msg + err.Error())
}
// AddHooks adds hooks into the Span.
//
// Any hooks that do not conform to at least one of the span hook interfaces
// will be discarded and an error will be logged.
//
// It is recommended that you only call AddHooks on a Span within an
// OnCreateChild/OnCreateServerSpan hook so the Span is set up with all of its
// hooks as a part of its creation.
func (s *Span) AddHooks(hooks ...interface{}) {
for _, hook := range hooks {
		if IsSpanHook(hook) {
			s.hooks = append(s.hooks, hook)
		} else {
s.logError(
"AddHooks error: ",
fmt.Errorf(
"tracing.Span.AddHooks: attempting to add non-SpanHook object into span's hook registry: %#v",
hook,
),
)
}
}
}
// SetDebug sets or unsets the debug flag of this Span.
func (s *Span) SetDebug(v bool) {
s.trace.setDebug(v)
}
// SetTag sets a binary tag annotation and calls all OnSetTag Hooks
// registered to the Span.
func (s *Span) SetTag(key string, value interface{}) opentracing.Span {
s.trace.setTag(key, value)
for _, h := range s.hooks {
if hook, ok := h.(SetSpanTagHook); ok {
if err := hook.OnSetTag(s, key, value); err != nil {
s.logError("OnSetTag hook error: ", err)
}
}
}
return s
}
// AddCounter adds delta to a counter annotation and calls all OnAddCounter
// Hooks registered to the Span.
func (s *Span) AddCounter(key string, delta float64) {
s.trace.addCounter(key, delta)
for _, h := range s.hooks {
if hook, ok := h.(AddSpanCounterHook); ok {
if err := hook.OnAddCounter(s, key, delta); err != nil {
s.logError("OnAddCounter hook error: ", err)
}
}
}
}
// Component returns the local component name of this span, with special cases.
//
// For local spans,
// this returns the component name set while starting the span,
// or "local" if it's empty.
// For client spans, this returns "clients".
// For all other span types, this returns the string version of the span type.
func (s *Span) Component() string {
switch s.spanType {
case SpanTypeClient:
return "clients"
case SpanTypeLocal:
if s.component != "" {
return s.component
}
}
return s.spanType.String()
}
// initChildSpan does the initialization for the child span to inherit from the
// parent.
func (s Span) initChildSpan(child *Span) {
child.trace.parentID = s.trace.spanID
child.trace.traceID = s.trace.traceID
child.trace.sampled = s.trace.sampled
child.trace.flags = s.trace.flags
child.hub = s.hub
if child.spanType != SpanTypeServer {
// We treat server spans differently. They should only be child to a span
// from the client side, and have their own create hooks, so we don't call
// their hooks here. See also: Tracer.StartSpan.
for _, h := range s.hooks {
if hook, ok := h.(CreateChildSpanHook); ok {
if err := hook.OnCreateChild(&s, child); err != nil {
s.logError("OnCreateChild hook error: ", err)
}
}
}
child.onStart()
}
}
// Stop stops the Span, calls all registered OnPreStop Hooks,
// serializes the Span,
// and sends the serialized Span to a back-end that records the Span.
//
// In most cases FinishWithOptions should be used instead,
// which calls Stop and auto logs the error returned by Stop.
// Stop is still provided in case there's need to handle the error differently.
func (s *Span) Stop(ctx context.Context, err error) error {
s.preStop(err)
for _, h := range s.hooks {
if hook, ok := h.(StartStopSpanHook); ok {
if hookErr := hook.OnPreStop(s, err); hookErr != nil {
s.logError("OnPreStop hook error: ", hookErr)
}
}
}
if s.trace.stop.IsZero() {
s.trace.stop = time.Now()
}
return s.trace.publish(ctx)
}
func (s *Span) preStop(err error) {
// We intentionally don't use the top level span.SetTag function
// because we don't want to trigger any OnSetTag Hooks in this case.
switch s.spanType {
case SpanTypeServer:
if err != nil && errors.Is(err, context.DeadlineExceeded) {
s.trace.setTag(ZipkinBinaryAnnotationKeyTimeOut, true)
}
case SpanTypeLocal:
if s.component != "" {
s.trace.setTag(ZipkinBinaryAnnotationKeyLocalComponent, s.component)
}
}
if err != nil {
s.trace.setTag(ZipkinBinaryAnnotationKeyError, true)
}
if s.trace.isDebugSet() {
s.trace.setTag(ZipkinBinaryAnnotationKeyDebug, true)
}
}
// getHub returns the *sentry.Hub attached to this span/trace.
//
// It's guaranteed to be non-nil.
func (s Span) getHub() *sentry.Hub {
if s.hub != nil {
return s.hub
}
// This shouldn't happen, but just in case to avoid panics.
return getNopHub()
}
// InjectTraceContext injects the sentry hub and logger with trace id
// information to the context object.
//
// It's called automatically by StartSpanFromHeaders and thriftbp/httpbp
// middlewares,
// so you don't need to call it for spans created automatically from requests.
// But you should call it if you created a top level span manually.
//
// It's also not needed to be called for the child spans,
// as the trace id attached would be the same.
func (s Span) InjectTraceContext(ctx context.Context) context.Context {
ctx = context.WithValue(ctx, sentry.HubContextKey, s.getHub())
ctx = log.Attach(ctx, log.AttachArgs{
TraceID: s.TraceID(),
})
return ctx
}
// ForeachBaggageItem implements opentracing.SpanContext.
//
// We don't support any extra baggage items, so it's a noop.
func (s *Span) ForeachBaggageItem(handler func(k, v string) bool) {}
// SetBaggageItem implements opentracing.Span.
//
// As we don't support any extra baggage items,
// it's a noop and just returns self.
func (s *Span) SetBaggageItem(restrictedKey, value string) opentracing.Span {
return s
}
// BaggageItem implements opentracing.Span.
//
// As we don't support any extra baggage items, it always returns empty string.
func (s *Span) BaggageItem(restrictedKey string) string {
return ""
}
// Finish implements opentracing.Span.
//
// It calls Stop with background context and nil error.
// If Stop returns an error, it will also be logged with the tracer's logger.
func (s *Span) Finish() {
if err := s.Stop(context.Background(), nil); err != nil {
s.logError("Span.Stop returned error: ", err)
}
}
// FinishWithOptions implements opentracing.Span.
//
// In this implementation we ignore all timestamps in opts,
// only extract context and error out of all the log fields,
// and ignore all other log fields.
//
// Please use FinishOptions.Convert() to prepare the opts arg.
//
// It calls Stop with context and error extracted from opts.
// If Stop returns an error, it will also be logged with the tracer's logger.
func (s *Span) FinishWithOptions(opts opentracing.FinishOptions) {
if !opts.FinishTime.IsZero() {
s.trace.stop = opts.FinishTime
}
var err error
ctx := context.Background()
for _, records := range opts.LogRecords {
for _, field := range records.Fields {
switch field.Key() {
case ctxKey:
if c, ok := field.Value().(context.Context); ok {
ctx = c
}
case errorKey:
if e, ok := field.Value().(error); ok {
err = e
}
}
}
}
if stopErr := s.Stop(ctx, err); stopErr != nil {
s.logError("Span.Stop returned error: ", stopErr)
}
}
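// A minimal usage sketch, assuming FinishOptions in this package carries the
// context and error via Ctx/Err fields, as the log-field extraction above implies:
//
//	span.FinishWithOptions(tracing.FinishOptions{
//		Ctx: ctx,
//		Err: err,
//	}.Convert())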
// Context implements opentracing.Span.
//
// It returns self as opentracing.SpanContext.
func (s *Span) Context() opentracing.SpanContext {
return s
}
// SetOperationName implements opentracing.Span.
func (s *Span) SetOperationName(operationName string) opentracing.Span {
s.trace.name = operationName
return s
}
// Tracer implements opentracing.Span.
func (s *Span) Tracer() opentracing.Tracer {
return s.trace.tracer
}
// LogFields implements opentracing.Span.
//
// In this implementation it's a no-op.
func (s *Span) LogFields(fields ...otlog.Field) {}
// LogKV implements opentracing.Span.
//
// In this implementation it's a no-op.
func (s *Span) LogKV(alternatingKeyValues ...interface{}) {}
// LogEvent implements opentracing.Span.
//
// it's deprecated in the interface and is a no-op here.
func (s *Span) LogEvent(event string) {}
// LogEventWithPayload implements opentracing.Span.
//
// it's deprecated in the interface and is a no-op here.
func (s *Span) LogEventWithPayload(event string, payload interface{}) {}
// Log implements opentracing.Span.
//
// it's deprecated in the interface and is a no-op here.
func (s *Span) Log(data opentracing.LogData) {}
// StartTopLevelServerSpan initializes a new, top level server span.
//
// This span will have a new TraceID and will be sampled based on your configured
// sample rate.
func StartTopLevelServerSpan(ctx context.Context, name string) (context.Context, *Span) {
otSpan, ctx := opentracing.StartSpanFromContext(
ctx,
name,
SpanTypeOption{Type: SpanTypeServer},
)
span := AsSpan(otSpan)
return span.InjectTraceContext(ctx), span
}
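// Usage sketch for a manually created top-level span (the operation name is
// illustrative):
//
//	ctx, span := tracing.StartTopLevelServerSpan(ctx, "cron.cleanup")
//	defer span.Finish()
//
// The returned context already has the trace context injected, so downstream
// logging and sentry reporting pick up the trace id.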
// Headers is the argument struct for starting a Span from upstream headers.
type Headers struct {
// TraceID is the trace ID passed via upstream headers.
TraceID string
// SpanID is the span ID passed via upstream headers.
SpanID string
// Flags is the flags int passed via upstream headers as a string.
Flags string
// Sampled is whether this span was sampled by the upstream caller. Uses
// a pointer to a bool so it can distinguish between set/not-set.
Sampled *bool
}
// AnySet returns true if any of the values in the Headers are set, false otherwise.
func (h Headers) AnySet() bool {
return h.TraceID != "" ||
h.SpanID != "" ||
h.Flags != "" ||
h.Sampled != nil
}
// ParseTraceID attempts to convert h.TraceID into a uint64, if it succeeds it
// returns the value and 'true'. If it fails, either because h.TraceID is not
// set or it is malformed, ok will be 'false' and you should not rely on the ID
// returned.
//
// If h.TraceID was malformed, an error will be logged using the global tracer's
// logger but no error will be returned.
func (h Headers) ParseTraceID() (id uint64, ok bool) {
if h.TraceID == "" {
return
}
var err error
id, err = strconv.ParseUint(h.TraceID, 10, 64)
if err != nil {
globalTracer.logger.Log(fmt.Sprintf(
"Malformed trace id in http ctx: %q, %v",
h.TraceID,
err,
))
return
}
ok = true
return
}
// ParseSpanID attempts to convert h.SpanID into a uint64, if it succeeds it
// returns the value and 'true'. If it fails, either because h.SpanID is not
// set or it is malformed, ok will be 'false' and you should not rely on the ID
// returned.
//
// If h.SpanID was malformed, an error will be logged using the global tracer's
// logger but no error will be returned.
func (h Headers) ParseSpanID() (id uint64, ok bool) {
if h.SpanID == "" {
return
}
var err error
id, err = strconv.ParseUint(h.SpanID, 10, 64)
if err != nil {
globalTracer.logger.Log(fmt.Sprintf(
"Malformed span id in http ctx: %q, %v",
h.SpanID,
err,
))
return
}
ok = true
return
}
// ParseFlags attempts to convert h.Flags into an int64, if it succeeds it
// returns the value and 'true'. If it fails, either because h.Flags is not
// set or it is malformed, ok will be 'false' and you should not rely on the ID
// returned.
//
// If h.Flags was malformed, an error will be logged using the global tracer's
// logger but no error will be returned.
func (h Headers) ParseFlags() (flags int64, ok bool) {
if h.Flags == "" {
return
}
var err error
flags, err = strconv.ParseInt(h.Flags, 10, 64)
if err != nil {
globalTracer.logger.Log(fmt.Sprintf(
"Malformed flags in http ctx: %q, %v",
h.Flags,
err,
))
return
}
ok = true
return
}
// ParseSampled returns the boolean value of h.Sampled and a flag specifying
// whether h.Sampled was set or not. If it is not set, both "sampled" and "ok"
// will return "false", but that does not mean that "sampled" should be false; you
// should only use the returned value for "sampled" if "ok" is true.
func (h Headers) ParseSampled() (sampled bool, ok bool) {
if h.Sampled == nil {
return false, false
}
return *h.Sampled, true
}
// StartSpanFromHeaders creates a server span from the passed in Headers. If no
// headers are set, then a new top-level server span will be created and returned.
//
// Please note that "Sampled" header is default to false according to baseplate
// spec, so if the headers are incorrect, this span (and all its child-spans)
// will never be sampled, unless debug flag was set explicitly later.
//
// If any headers are missing or malformed, they will be ignored.
// Malformed headers will be logged if InitGlobalTracer was last called with a
// non-nil logger.
func StartSpanFromHeaders(ctx context.Context, name string, headers Headers) (context.Context, *Span) {
if !headers.AnySet() {
return StartTopLevelServerSpan(ctx, name)
}
span := newSpan(nil, name, SpanTypeServer)
defer func() {
onCreateServerSpan(span)
span.onStart()
}()
ctx = opentracing.ContextWithSpan(ctx, span)
if id, ok := headers.ParseTraceID(); ok {
span.trace.traceID = id
}
if id, ok := headers.ParseSpanID(); ok {
span.trace.parentID = id
}
if flags, ok := headers.ParseFlags(); ok {
span.trace.flags = flags
}
if sampled, ok := headers.ParseSampled(); ok {
span.trace.sampled = sampled
}
initRootSpan(span)
ctx = span.InjectTraceContext(ctx)
return ctx, span
}
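// Usage sketch (extracting the header values from the transport is up to the
// caller; the literal values are illustrative):
//
//	sampled := true
//	ctx, span := tracing.StartSpanFromHeaders(ctx, "my.endpoint", tracing.Headers{
//		TraceID: "12345",
//		SpanID:  "67890",
//		Sampled: &sampled,
//	})
//	defer span.Finish()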
// initRootSpan is the other half of initChildSpan.
//
// One of initRootSpan and initChildSpan MUST be called for every span created.
// This function should be called AFTER we set the trace id correctly.
//
// Note that the notion of "root" here is slightly counterintuitive.
// It includes spans whose parent is not in this process
// (e.g. the first span created from the request handler,
// while their parent is on the client side).
// It doesn't necessarily mean top level traces.
//
// It also doesn't necessarily mean the span must be a server span.
func initRootSpan(s *Span) {
hub := sentry.CurrentHub()
if hub == nil {
// This shouldn't happen, but just in case to avoid panic.
hub = getNopHub()
} else {
hub = hub.Clone()
}
hub.ConfigureScope(func(scope *sentry.Scope) {
scope.SetTag("trace_id", strconv.FormatUint(s.TraceID(), 10))
})
s.hub = hub
}
var nopHub = sentry.NewHub(nil, sentry.NewScope())
func getNopHub() *sentry.Hub {
// Whenever this function is called, it means we had a bug that didn't
// initialize the spans correctly.
globalTracer.logger.Log("getNopHub called.")
return nopHub
}
RADLAN-IpRouter.py | #
# PySNMP MIB module RADLAN-IpRouter (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/RADLAN-IpRouter
# Produced by pysmi-0.3.4 at Wed May 1 14:47:13 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
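# Hedged note: generated modules like this one are not imported directly; pysnmp
# loads them through a MibBuilder, which supplies the mibBuilder name used below:
#
#   from pysnmp.smi import builder
#   mibBuilder = builder.MibBuilder()
#   mibBuilder.loadModules('RADLAN-IpRouter')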
ObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "Integer", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsUnion, ValueRangeConstraint, SingleValueConstraint, ConstraintsIntersection, ValueSizeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsUnion", "ValueRangeConstraint", "SingleValueConstraint", "ConstraintsIntersection", "ValueSizeConstraint")
AreaID, ospfVirtIfEntry, ospfIfEntry, RouterID = mibBuilder.importSymbols("OSPF-MIB", "AreaID", "ospfVirtIfEntry", "ospfIfEntry", "RouterID")
ipSpec, rip2Spec, ipRouteLeaking, rlIpRoutingProtPreference, ipRedundancy, ipRipFilter, rlOspf = mibBuilder.importSymbols("RADLAN-IP", "ipSpec", "rip2Spec", "ipRouteLeaking", "rlIpRoutingProtPreference", "ipRedundancy", "ipRipFilter", "rlOspf")
rip2IfConfEntry, = mibBuilder.importSymbols("RFC1389-MIB", "rip2IfConfEntry")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
Counter64, Bits, Integer32, MibIdentifier, TimeTicks, ObjectIdentity, iso, Counter32, Unsigned32, Gauge32, NotificationType, IpAddress, MibScalar, MibTable, MibTableRow, MibTableColumn, ModuleIdentity = mibBuilder.importSymbols("SNMPv2-SMI", "Counter64", "Bits", "Integer32", "MibIdentifier", "TimeTicks", "ObjectIdentity", "iso", "Counter32", "Unsigned32", "Gauge32", "NotificationType", "IpAddress", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "ModuleIdentity")
DisplayString, TextualConvention, TruthValue, RowStatus = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention", "TruthValue", "RowStatus")
rlIpRouter = ModuleIdentity((1, 3, 6, 1, 4, 1, 89, 26, 18))
rlIpRouter.setRevisions(('2004-06-01 00:00',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
if mibBuilder.loadTexts: rlIpRouter.setRevisionsDescriptions(('Initial version of this MIB.',))
if mibBuilder.loadTexts: rlIpRouter.setLastUpdated('200406010000Z')
if mibBuilder.loadTexts: rlIpRouter.setOrganization('')
if mibBuilder.loadTexts: rlIpRouter.setContactInfo('')
if mibBuilder.loadTexts: rlIpRouter.setDescription('The private MIB module definition for router MIB.')
rsRip2IfConfTable = MibTable((1, 3, 6, 1, 4, 1, 89, 26, 3, 1), )
if mibBuilder.loadTexts: rsRip2IfConfTable.setStatus('current')
if mibBuilder.loadTexts: rsRip2IfConfTable.setDescription('This table is an extension of rip2IfConfTable (RFC 1389, RIP 2)')
rsRip2IfConfEntry = MibTableRow((1, 3, 6, 1, 4, 1, 89, 26, 3, 1, 1), ).setIndexNames((0, "RADLAN-IpRouter", "rsRip2IfConfAddress"))
if mibBuilder.loadTexts: rsRip2IfConfEntry.setStatus('current')
if mibBuilder.loadTexts: rsRip2IfConfEntry.setDescription('The row definition for this table.')
rsRip2IfConfAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 3, 1, 1, 1), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsRip2IfConfAddress.setStatus('current')
if mibBuilder.loadTexts: rsRip2IfConfAddress.setDescription(' The IP Address of this system on the indicated subnet. ')
rsRip2IfConfVirtualDis = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 3, 1, 1, 2), Integer32().clone(1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rsRip2IfConfVirtualDis.setStatus('current')
if mibBuilder.loadTexts: rsRip2IfConfVirtualDis.setDescription('This variable defines the virtual number of hops assigned to the interface specified by rsIfIpAddrIndex. This enables fine-tuning of the RIP routing algorithm.')
rsRip2IfConfAutoSend = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 3, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2))).clone('enable')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rsRip2IfConfAutoSend.setStatus('current')
if mibBuilder.loadTexts: rsRip2IfConfAutoSend.setDescription('This variable controls RIP automatic send behavior. If enabled and no routers were heard on the interface, RIP will only send default route with high metric. Otherwise RIP will send updates according to configuration. ')
rlRip2IfConfKeyChain = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 3, 1, 1, 4), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rlRip2IfConfKeyChain.setStatus('current')
if mibBuilder.loadTexts: rlRip2IfConfKeyChain.setDescription('Name of the key-chain which rip2 interface uses for md5 authentication')
rlRip2AutoInterfaceCreation = MibScalar((1, 3, 6, 1, 4, 1, 89, 26, 3, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlRip2AutoInterfaceCreation.setStatus('current')
if mibBuilder.loadTexts: rlRip2AutoInterfaceCreation.setDescription('This variable controls RIP automatic creation and activation of interfaces. If value is enable - IP interface creation results in creation and activation of rip Interface. If value is disable Rip interface is created but not activated. The option is a platform parameter.')
rlRip2MibVersion = MibScalar((1, 3, 6, 1, 4, 1, 89, 26, 3, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlRip2MibVersion.setStatus('current')
if mibBuilder.loadTexts: rlRip2MibVersion.setDescription("MIB's version, the current version is 1.")
ipRedundAdminStatus = MibScalar((1, 3, 6, 1, 4, 1, 89, 26, 6, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2))).clone('disable')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipRedundAdminStatus.setStatus('current')
if mibBuilder.loadTexts: ipRedundAdminStatus.setDescription('This parameter controls the IP Redundancy in the device. In case the parameter is Enable and the other router becomes inoperational, all the traffic is handled by this element.')
ipRedundOperStatus = MibScalar((1, 3, 6, 1, 4, 1, 89, 26, 6, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("active", 1), ("inactive", 2))).clone('inactive')).setMaxAccess("readonly")
if mibBuilder.loadTexts: ipRedundOperStatus.setStatus('current')
if mibBuilder.loadTexts: ipRedundOperStatus.setDescription(' obsolete ')
ipRedundRoutersTable = MibTable((1, 3, 6, 1, 4, 1, 89, 26, 6, 3), )
if mibBuilder.loadTexts: ipRedundRoutersTable.setStatus('current')
if mibBuilder.loadTexts: ipRedundRoutersTable.setDescription('List of IP addresses backed up by this router.')
ipRedundRoutersEntry = MibTableRow((1, 3, 6, 1, 4, 1, 89, 26, 6, 3, 1), ).setIndexNames((0, "RADLAN-IpRouter", "ipRedundRoutersIfAddr"), (0, "RADLAN-IpRouter", "ipRedundRoutersMainRouterAddr"))
if mibBuilder.loadTexts: ipRedundRoutersEntry.setStatus('current')
if mibBuilder.loadTexts: ipRedundRoutersEntry.setDescription(' The row definition for this table.')
ipRedundRoutersIfAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 6, 3, 1, 1), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ipRedundRoutersIfAddr.setStatus('current')
if mibBuilder.loadTexts: ipRedundRoutersIfAddr.setDescription('The Ip address of the IP interface on which the redundancy feature is operational.')
ipRedundRoutersMainRouterAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 6, 3, 1, 2), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ipRedundRoutersMainRouterAddr.setStatus('current')
if mibBuilder.loadTexts: ipRedundRoutersMainRouterAddr.setDescription('The Ip address of the polled main router.')
ipRedundRoutersOperStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 6, 3, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("active", 1), ("inactive", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ipRedundRoutersOperStatus.setStatus('current')
if mibBuilder.loadTexts: ipRedundRoutersOperStatus.setDescription('If active, the main router is considered inoperational and the IP interface operates as its backup.')
ipRedundRoutersPollInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 6, 3, 1, 4), Integer32().clone(3)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipRedundRoutersPollInterval.setStatus('current')
if mibBuilder.loadTexts: ipRedundRoutersPollInterval.setDescription('Polling interval for this router (in seconds). If 0 the router is not polled.')
ipRedundRoutersTimeout = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 6, 3, 1, 5), Integer32().clone(12)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipRedundRoutersTimeout.setStatus('current')
if mibBuilder.loadTexts: ipRedundRoutersTimeout.setDescription('Interval in seconds during which the backed-up router must signal. If it does not signal, it is considered inoperational and the IP interface starts operating as backup.')
ipRedundRoutersStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 6, 3, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("active", 1), ("notInService", 2), ("notReady", 3), ("createAndGo", 4), ("createAndWait", 5), ("destroy", 6)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipRedundRoutersStatus.setStatus('current')
if mibBuilder.loadTexts: ipRedundRoutersStatus.setDescription('Entry status')
ipLeakStaticToRip = MibScalar((1, 3, 6, 1, 4, 1, 89, 26, 7, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2))).clone('enable')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipLeakStaticToRip.setStatus('current')
if mibBuilder.loadTexts: ipLeakStaticToRip.setDescription('This parameter controls leaking (redistribution) of static routes to RIP. When enabled, all routes inserted to the IP routing table via SNMP are advertised into RIP.')
ipLeakStaticToOspf = MibScalar((1, 3, 6, 1, 4, 1, 89, 26, 7, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2))).clone('enable')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipLeakStaticToOspf.setStatus('current')
if mibBuilder.loadTexts: ipLeakStaticToOspf.setDescription('This parameter controls leaking (redistribution) of static routes into OSPF. When enabled, all routes inserted to the IP routing table via SNMP are advertised into OSPF as external routes.')
ipLeakOspfToRip = MibScalar((1, 3, 6, 1, 4, 1, 89, 26, 7, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2))).clone('disable')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipLeakOspfToRip.setStatus('current')
if mibBuilder.loadTexts: ipLeakOspfToRip.setDescription('This parameter controls leaking (redistribution) of routes from OSPF to RIP. If enabled, all routes learned via OSPF are advertised into RIP.')
ipLeakRipToOspf = MibScalar((1, 3, 6, 1, 4, 1, 89, 26, 7, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2))).clone('disable')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipLeakRipToOspf.setStatus('current')
if mibBuilder.loadTexts: ipLeakRipToOspf.setDescription('This parameter controls leaking (redistribution) of routes from RIP to OSPF. If enabled, all routes learned via RIP are advertised into OSPF as external routes.')
ipLeakExtDirectToOspf = MibScalar((1, 3, 6, 1, 4, 1, 89, 26, 7, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2))).clone('enable')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipLeakExtDirectToOspf.setStatus('current')
if mibBuilder.loadTexts: ipLeakExtDirectToOspf.setDescription('This parameter controls leaking (redistribution) into OSPF of direct routes external to OSPF, i.e. routes to local network corresponding to IP interfaces on which OSPF is disabled. When enabled, all such direct routes are advertised into OSPF as external routes.')
rsIpRipFilterGlbTable = MibTable((1, 3, 6, 1, 4, 1, 89, 26, 8, 1), )
if mibBuilder.loadTexts: rsIpRipFilterGlbTable.setStatus('current')
if mibBuilder.loadTexts: rsIpRipFilterGlbTable.setDescription('The table of RIP global filters per IP router.')
rsIpRipFilterGlbEntry = MibTableRow((1, 3, 6, 1, 4, 1, 89, 26, 8, 1, 1), ).setIndexNames((0, "RADLAN-IpRouter", "rsIpRipFilterGlbType"), (0, "RADLAN-IpRouter", "rsIpRipFilterGlbNumber"))
if mibBuilder.loadTexts: rsIpRipFilterGlbEntry.setStatus('current')
if mibBuilder.loadTexts: rsIpRipFilterGlbEntry.setDescription(' An entry in the RIP global filter table ')
rsIpRipFilterGlbType = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 8, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("input", 1), ("output", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsIpRipFilterGlbType.setStatus('current')
if mibBuilder.loadTexts: rsIpRipFilterGlbType.setDescription(' Type of filter - input/output ')
rsIpRipFilterGlbNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 8, 1, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsIpRipFilterGlbNumber.setStatus('current')
if mibBuilder.loadTexts: rsIpRipFilterGlbNumber.setDescription(' Number of RIP filter. ')
rsIpRipFilterGlbStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 8, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("valid", 1), ("invalid", 2), ("underCreation", 3))).clone('valid')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rsIpRipFilterGlbStatus.setStatus('current')
if mibBuilder.loadTexts: rsIpRipFilterGlbStatus.setDescription('The validity of this entry. Setting this value to invalid deletes the entry, and the entry may actually be removed from the table.')
rsIpRipFilterGlbIpAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 8, 1, 1, 4), IpAddress().clone(hexValue="00000000")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rsIpRipFilterGlbIpAddr.setStatus('current')
if mibBuilder.loadTexts: rsIpRipFilterGlbIpAddr.setDescription(' network prefix IP address, as in the forwarding table. ')
rsIpRipFilterGlbNetworkMaskBits = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 8, 1, 1, 5), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rsIpRipFilterGlbNetworkMaskBits.setStatus('current')
if mibBuilder.loadTexts: rsIpRipFilterGlbNetworkMaskBits.setDescription('The number of bits in the IP network mask, called network-prefix-length in Router Requirements terminology. For example, the value 16 means mask 255.255.0.0.')
rsIpRipFilterGlbMatchBits = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 8, 1, 1, 6), Integer32().clone(32)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rsIpRipFilterGlbMatchBits.setStatus('current')
if mibBuilder.loadTexts: rsIpRipFilterGlbMatchBits.setDescription('The number of bits to match in the network IP address. A value smaller than 32 defines a wildcard. For example, the value 8 means all routes whose leftmost 8 bits are equal to those of the network IP address. If this variable has a value other than 32, then rsIpRipFilterGlbNetworkMaskBits must be 0 and is ignored.')
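# --- Illustration (not part of the generated MIB module) ---------------------
# A minimal sketch of the mask/match semantics described above; the helper
# names are hypothetical and exist only to clarify the DESCRIPTION texts.
def _mask_from_prefix_len(bits):
    # e.g. 16 -> 0xFFFF0000, i.e. 255.255.0.0
    return (0xFFFFFFFF << (32 - bits)) & 0xFFFFFFFF if bits else 0

def _rip_filter_matches(route_addr, filter_addr, match_bits):
    # match_bits < 32 defines a wildcard: only the leftmost match_bits bits
    # of the network IP address are compared (addresses as 32-bit integers).
    mask = _mask_from_prefix_len(match_bits)
    return (route_addr & mask) == (filter_addr & mask)
# -----------------------------------------------------------------------------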
rsIpRipFilterGlbAction = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 8, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("deny", 1), ("permit", 2))).clone('permit')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rsIpRipFilterGlbAction.setStatus('current')
if mibBuilder.loadTexts: rsIpRipFilterGlbAction.setDescription(' Filter action - permit/deny for this network')
rsIpRipFilterLclTable = MibTable((1, 3, 6, 1, 4, 1, 89, 26, 8, 2), )
if mibBuilder.loadTexts: rsIpRipFilterLclTable.setStatus('current')
if mibBuilder.loadTexts: rsIpRipFilterLclTable.setDescription('Table of input/output RIP filters used per IP Interface.')
rsIpRipFilterLclEntry = MibTableRow((1, 3, 6, 1, 4, 1, 89, 26, 8, 2, 1), ).setIndexNames((0, "RADLAN-IpRouter", "rsIpRipFilterLclIpIntf"), (0, "RADLAN-IpRouter", "rsIpRipFilterLclType"), (0, "RADLAN-IpRouter", "rsIpRipFilterLclNumber"))
if mibBuilder.loadTexts: rsIpRipFilterLclEntry.setStatus('current')
if mibBuilder.loadTexts: rsIpRipFilterLclEntry.setDescription(' An entry in the Intf RIP filter table')
rsIpRipFilterLclIpIntf = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 8, 2, 1, 1), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsIpRipFilterLclIpIntf.setStatus('current')
if mibBuilder.loadTexts: rsIpRipFilterLclIpIntf.setDescription(' The IP address identifying the RIP interface for this filter. This value corresponds to rsIpAdEntAddr. ')
rsIpRipFilterLclType = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 8, 2, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("input", 1), ("output", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsIpRipFilterLclType.setStatus('current')
if mibBuilder.loadTexts: rsIpRipFilterLclType.setDescription(' Type of filter - input/output ')
rsIpRipFilterLclNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 8, 2, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsIpRipFilterLclNumber.setStatus('current')
if mibBuilder.loadTexts: rsIpRipFilterLclNumber.setDescription(' Number of RIP filter for this Interface')
rsIpRipFilterLclStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 8, 2, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("valid", 1), ("invalid", 2), ("underCreation", 3))).clone('valid')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rsIpRipFilterLclStatus.setStatus('current')
if mibBuilder.loadTexts: rsIpRipFilterLclStatus.setDescription('The validity of this entry. Setting this value to invalid deletes the entry, and the entry may actually be removed from the table.')
rsIpRipFilterLclIpAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 8, 2, 1, 5), IpAddress().clone(hexValue="00000000")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rsIpRipFilterLclIpAddr.setStatus('current')
if mibBuilder.loadTexts: rsIpRipFilterLclIpAddr.setDescription(' network prefix IP address, as in the forwarding table. ')
rsIpRipFilterLclNetworkMaskBits = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 8, 2, 1, 6), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rsIpRipFilterLclNetworkMaskBits.setStatus('current')
if mibBuilder.loadTexts: rsIpRipFilterLclNetworkMaskBits.setDescription('The number of bits in the IP network mask, called network-prefix-length in Router Requirements terminology. For example, the value 16 means mask 255.255.0.0.')
rsIpRipFilterLclMatchBits = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 8, 2, 1, 7), Integer32().clone(32)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rsIpRipFilterLclMatchBits.setStatus('current')
if mibBuilder.loadTexts: rsIpRipFilterLclMatchBits.setDescription('The number of bits to match in the network IP address. A value smaller than 32 defines a wildcard. For example, the value 8 means all routes whose leftmost 8 bits are equal to those of the network IP address. If this variable has a value other than 32, then rsIpRipFilterLclNetworkMaskBits must be 0 and is ignored.')
rsIpRipFilterLclAction = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 8, 2, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("deny", 1), ("permit", 2))).clone('permit')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rsIpRipFilterLclAction.setStatus('current')
if mibBuilder.loadTexts: rsIpRipFilterLclAction.setDescription(' Filter action - permit/deny ')
rlIpRoutingProtPreferenceDirect = MibScalar((1, 3, 6, 1, 4, 1, 89, 26, 13, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 254)).clone(20)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rlIpRoutingProtPreferenceDirect.setStatus('current')
if mibBuilder.loadTexts: rlIpRoutingProtPreferenceDirect.setDescription('The Preference given to routes whose origin is Local IP (i.e. an IP interface in IpAddrTable). It is proposed that this preference be higher than that of the dynamic routing protocols. Changing this value may lead to unexpected results, such as routing loops.')
rlIpRoutingProtPreferenceStatic = MibScalar((1, 3, 6, 1, 4, 1, 89, 26, 13, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255)).clone(10)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rlIpRoutingProtPreferenceStatic.setStatus('current')
if mibBuilder.loadTexts: rlIpRoutingProtPreferenceStatic.setDescription('The Preference given to routes whose origin is manual configuration by network management tools, i.e. command line or SNMP.')
rlIpRoutingProtPreferenceOspfInter = MibScalar((1, 3, 6, 1, 4, 1, 89, 26, 13, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255)).clone(30)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rlIpRoutingProtPreferenceOspfInter.setStatus('current')
if mibBuilder.loadTexts: rlIpRoutingProtPreferenceOspfInter.setDescription('The Preference given to routes whose origin is internal OSPF links. Relates to routes which are based on OSPF Link State Advertisements of types 1-4.')
rlIpRoutingProtPreferenceOspfExt = MibScalar((1, 3, 6, 1, 4, 1, 89, 26, 13, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255)).clone(60)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rlIpRoutingProtPreferenceOspfExt.setStatus('current')
if mibBuilder.loadTexts: rlIpRoutingProtPreferenceOspfExt.setDescription('The Preference given to routes whose origin is external to OSPF, i.e. routes imported by an OSPF AS Border Router. Relates to routes which are based on OSPF Link State Advertisements of types 5 and 7.')
rlIpRoutingProtPreferenceOspfReject = MibScalar((1, 3, 6, 1, 4, 1, 89, 26, 13, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255)).clone(254)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rlIpRoutingProtPreferenceOspfReject.setStatus('current')
if mibBuilder.loadTexts: rlIpRoutingProtPreferenceOspfReject.setDescription('The Preference given to routes whose origin is OSPF and which are inserted to cover gaps in a network range.')
rlIpRoutingProtPreferenceRipNormal = MibScalar((1, 3, 6, 1, 4, 1, 89, 26, 13, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255)).clone(60)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rlIpRoutingProtPreferenceRipNormal.setStatus('current')
if mibBuilder.loadTexts: rlIpRoutingProtPreferenceRipNormal.setDescription('The Preference given to routes whose origin is the RIP routing domain.')
rlIpRoutingProtPreferenceRipAggregate = MibScalar((1, 3, 6, 1, 4, 1, 89, 26, 13, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255)).clone(254)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rlIpRoutingProtPreferenceRipAggregate.setStatus('current')
if mibBuilder.loadTexts: rlIpRoutingProtPreferenceRipAggregate.setDescription('The Preference given to routes whose origin is aggregation, a method used by RIPv1 to handle the CIDR schema. The idea is that RIPv1 aggregates routes which fall into a certain class of IP addresses. Such a route is in effect a discard route, and it is referenced at forwarding route lookup only if there is no better match (which means the route is not available).')
rlIpRoutingProtPreferenceBgp = MibScalar((1, 3, 6, 1, 4, 1, 89, 26, 13, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255)).clone(80)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rlIpRoutingProtPreferenceBgp.setStatus('current')
if mibBuilder.loadTexts: rlIpRoutingProtPreferenceBgp.setDescription('The Preference given to routes whose origin is BGP routers (EBGP or IBGP).')
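# --- Illustration (hypothetical, not part of the generated MIB module) -------
# The default preferences defined by the scalars above, collected in one place.
# Assumption (not stated by the MIB itself): when several protocols offer a
# route to the same prefix, the origin with the numerically lowest preference
# value wins, so "higher preference" in the DESCRIPTION texts means "more
# preferred" rather than "larger number".
_DEFAULT_ROUTE_PREFERENCES = {
    'static': 10, 'direct': 20, 'ospfInter': 30, 'ospfExt': 60,
    'ripNormal': 60, 'bgp': 80, 'ospfReject': 254, 'ripAggregate': 254,
}
# -----------------------------------------------------------------------------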
rlOspfMibVersion = MibScalar((1, 3, 6, 1, 4, 1, 89, 26, 14, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlOspfMibVersion.setStatus('current')
if mibBuilder.loadTexts: rlOspfMibVersion.setDescription("MIB's version, the current version is 1.")
rlOspfAutoInterfaceCreation = MibScalar((1, 3, 6, 1, 4, 1, 89, 26, 14, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlOspfAutoInterfaceCreation.setStatus('current')
if mibBuilder.loadTexts: rlOspfAutoInterfaceCreation.setDescription('This variable controls OSPF automatic creation and activation of interfaces. If the value is enable, IP interface creation results in creation and activation of an OSPF interface. If the value is disable, the OSPF interface is created but not activated. The option is a platform parameter.')
rlOspfIfExtTable = MibTable((1, 3, 6, 1, 4, 1, 89, 26, 14, 3), )
if mibBuilder.loadTexts: rlOspfIfExtTable.setStatus('current')
if mibBuilder.loadTexts: rlOspfIfExtTable.setDescription('The OSPF Interface Table describes the interfaces from the viewpoint of OSPF.')
rlOspfIfExtEntry = MibTableRow((1, 3, 6, 1, 4, 1, 89, 26, 14, 3, 1), )
ospfIfEntry.registerAugmentions(("RADLAN-IpRouter", "rlOspfIfExtEntry"))
rlOspfIfExtEntry.setIndexNames(*ospfIfEntry.getIndexNames())
if mibBuilder.loadTexts: rlOspfIfExtEntry.setStatus('current')
if mibBuilder.loadTexts: rlOspfIfExtEntry.setDescription('The OSPF interface table extension for md5 authentication')
rlOspfifKeyChain = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 14, 3, 1, 1), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rlOspfifKeyChain.setStatus('current')
if mibBuilder.loadTexts: rlOspfifKeyChain.setDescription('Name of the key-chain which the OSPF interface uses for MD5 authentication.')
rlOspfRtrLnkTable = MibTable((1, 3, 6, 1, 4, 1, 89, 26, 14, 4), )
if mibBuilder.loadTexts: rlOspfRtrLnkTable.setStatus('current')
if mibBuilder.loadTexts: rlOspfRtrLnkTable.setDescription('Router Link State Advertisement.')
rlOspfRtrLnkEntry = MibTableRow((1, 3, 6, 1, 4, 1, 89, 26, 14, 4, 1), ).setIndexNames((0, "RADLAN-IpRouter", "rlOspfRtrLnkAreaId"), (0, "RADLAN-IpRouter", "rlOspfRtrLnkLsid"), (0, "RADLAN-IpRouter", "rlOspfRtrLnkRouterId"), (0, "RADLAN-IpRouter", "rlOspfRtrLnkIdx"))
if mibBuilder.loadTexts: rlOspfRtrLnkEntry.setStatus('current')
if mibBuilder.loadTexts: rlOspfRtrLnkEntry.setDescription('A single entry from Router LSA.')
rlOspfRtrLnkAreaId = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 14, 4, 1, 1), AreaID()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlOspfRtrLnkAreaId.setReference('OSPF Version 2, Appendix C.2 Area parameters')
if mibBuilder.loadTexts: rlOspfRtrLnkAreaId.setStatus('current')
if mibBuilder.loadTexts: rlOspfRtrLnkAreaId.setDescription('The 32 bit identifier of the Area from which the LSA was received.')
rlOspfRtrLnkLsid = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 14, 4, 1, 2), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlOspfRtrLnkLsid.setReference('OSPF Version 2, Section 12.1.4 Link State ID')
if mibBuilder.loadTexts: rlOspfRtrLnkLsid.setStatus('current')
if mibBuilder.loadTexts: rlOspfRtrLnkLsid.setDescription('The Link State ID is an LS Type Specific field containing either a Router ID or an IP Address; it identifies the piece of the routing domain that is being described by the advertisement.')
rlOspfRtrLnkRouterId = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 14, 4, 1, 3), RouterID()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlOspfRtrLnkRouterId.setReference('OSPF Version 2, Appendix C.1 Global parameters')
if mibBuilder.loadTexts: rlOspfRtrLnkRouterId.setStatus('current')
if mibBuilder.loadTexts: rlOspfRtrLnkRouterId.setDescription('The 32 bit number that uniquely identifies the originating router in the Autonomous System.')
rlOspfRtrLnkIdx = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 14, 4, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlOspfRtrLnkIdx.setStatus('current')
if mibBuilder.loadTexts: rlOspfRtrLnkIdx.setDescription('The index is an unsigned 32-bit integer. It is used as the sequence number of an entry in the LSA and is relevant only for Router or Network LSAs, which can contain an unlimited number of entries.')
rlOspfRtrLnkSequence = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 14, 4, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlOspfRtrLnkSequence.setReference('OSPF Version 2, Section 12.1.6 LS sequence number')
if mibBuilder.loadTexts: rlOspfRtrLnkSequence.setStatus('current')
if mibBuilder.loadTexts: rlOspfRtrLnkSequence.setDescription('The sequence number field is a signed 32-bit integer. It is used to detect old and duplicate link state advertisements. The space of sequence numbers is linearly ordered. The larger the sequence number the more recent the advertisement.')
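# --- Illustration (not part of the generated MIB module) ---------------------
# The LS sequence number is compared as a signed 32-bit value; a sketch of
# turning the raw unsigned wire value into a comparable Python integer:
def _ls_seq_to_signed32(u):
    return u - 0x100000000 if u >= 0x80000000 else u
# The advertisement with the larger signed sequence number is the more recent.
# -----------------------------------------------------------------------------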
rlOspfRtrLnkAge = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 14, 4, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlOspfRtrLnkAge.setReference('OSPF Version 2, Section 12.1.1 LS age')
if mibBuilder.loadTexts: rlOspfRtrLnkAge.setStatus('current')
if mibBuilder.loadTexts: rlOspfRtrLnkAge.setDescription('This field is the age of the link state advertisement in seconds.')
rlOspfRtrLnkChecksum = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 14, 4, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlOspfRtrLnkChecksum.setReference('OSPF Version 2, Section 12.1.7 LS checksum')
if mibBuilder.loadTexts: rlOspfRtrLnkChecksum.setStatus('current')
if mibBuilder.loadTexts: rlOspfRtrLnkChecksum.setDescription("This field is the checksum of the complete contents of the advertisement, excepting the age field. The age field is excepted so that an advertisement's age can be incremented without updating the checksum. The checksum used is the same that is used for ISO connectionless datagrams; it is commonly referred to as the Fletcher checksum.")
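# --- Illustration (not part of the generated MIB module) ---------------------
# A minimal sketch of the Fletcher checksum referenced above. Real OSPF
# implementations run it over the LSA with the LS age field excluded and use
# the check-octet placement from the ISO 8473 / RFC 905 procedure; this shows
# only the core accumulation:
def _fletcher16(octets):
    c0 = c1 = 0
    for b in octets:
        c0 = (c0 + b) % 255
        c1 = (c1 + c0) % 255
    return (c1 << 8) | c0
# -----------------------------------------------------------------------------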
rlOspfRtrLnkLength = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 14, 4, 1, 8), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlOspfRtrLnkLength.setStatus('current')
if mibBuilder.loadTexts: rlOspfRtrLnkLength.setDescription('The length in bytes of the LSA. This includes the 20 byte LSA header.')
rlOspfRtrLnkBitV = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 14, 4, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("off", 1), ("on", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlOspfRtrLnkBitV.setStatus('current')
if mibBuilder.loadTexts: rlOspfRtrLnkBitV.setDescription('When set, the router is an endpoint of one or more fully adjacent virtual links having the described area as Transit area (V is for virtual link endpoint).')
rlOspfRtrLnkBitE = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 14, 4, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("off", 1), ("on", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlOspfRtrLnkBitE.setStatus('current')
if mibBuilder.loadTexts: rlOspfRtrLnkBitE.setDescription('When set, the router is an AS boundary router (E is for external).')
rlOspfRtrLnkBitB = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 14, 4, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("off", 1), ("on", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlOspfRtrLnkBitB.setStatus('current')
if mibBuilder.loadTexts: rlOspfRtrLnkBitB.setDescription('When set, the router is an area border router (B is for border).')
rlOspfRtrLnkLinks = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 14, 4, 1, 12), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlOspfRtrLnkLinks.setStatus('current')
if mibBuilder.loadTexts: rlOspfRtrLnkLinks.setDescription('The number of router links described in this LSA. This must be the total collection of router links (i.e., interfaces) to the area.')
rlOspfRtrLnkLinkID = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 14, 4, 1, 13), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlOspfRtrLnkLinkID.setStatus('current')
if mibBuilder.loadTexts: rlOspfRtrLnkLinkID.setDescription("Identifies the object that this router link connects to. Value depends on the link's Type.")
rlOspfRtrLnkLinkData = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 14, 4, 1, 14), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlOspfRtrLnkLinkData.setStatus('current')
if mibBuilder.loadTexts: rlOspfRtrLnkLinkData.setDescription("Value depends on the link's Type field.")
rlOspfRtrLnkType = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 14, 4, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("pointToPoint", 1), ("transitNetwork", 2), ("stubNetwork", 3), ("virtualLink", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlOspfRtrLnkType.setStatus('current')
if mibBuilder.loadTexts: rlOspfRtrLnkType.setDescription('A quick description of the router link.')
rlOspfRtrLnkMetric = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 14, 4, 1, 16), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlOspfRtrLnkMetric.setStatus('current')
if mibBuilder.loadTexts: rlOspfRtrLnkMetric.setDescription('The cost of using this router link.')
rlOspfNetLnkTable = MibTable((1, 3, 6, 1, 4, 1, 89, 26, 14, 5), )
if mibBuilder.loadTexts: rlOspfNetLnkTable.setStatus('current')
if mibBuilder.loadTexts: rlOspfNetLnkTable.setDescription('Network Link State Advertisement.')
rlOspfNetLnkEntry = MibTableRow((1, 3, 6, 1, 4, 1, 89, 26, 14, 5, 1), ).setIndexNames((0, "RADLAN-IpRouter", "rlOspfNetLnkAreaId"), (0, "RADLAN-IpRouter", "rlOspfNetLnkLsid"), (0, "RADLAN-IpRouter", "rlOspfNetLnkRouterId"), (0, "RADLAN-IpRouter", "rlOspfNetLnkIdx"))
if mibBuilder.loadTexts: rlOspfNetLnkEntry.setStatus('current')
if mibBuilder.loadTexts: rlOspfNetLnkEntry.setDescription('A single entry from Network LSA.')
rlOspfNetLnkAreaId = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 14, 5, 1, 1), AreaID()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlOspfNetLnkAreaId.setReference('OSPF Version 2, Appendix C.2 Area parameters')
if mibBuilder.loadTexts: rlOspfNetLnkAreaId.setStatus('current')
if mibBuilder.loadTexts: rlOspfNetLnkAreaId.setDescription('The 32 bit identifier of the Area from which the LSA was received.')
rlOspfNetLnkLsid = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 14, 5, 1, 2), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlOspfNetLnkLsid.setReference('OSPF Version 2, Section 12.1.4 Link State ID')
if mibBuilder.loadTexts: rlOspfNetLnkLsid.setStatus('current')
if mibBuilder.loadTexts: rlOspfNetLnkLsid.setDescription('The Link State ID is an LS Type Specific field containing either a Router ID or an IP Address; it identifies the piece of the routing domain that is being described by the advertisement.')
rlOspfNetLnkRouterId = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 14, 5, 1, 3), RouterID()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlOspfNetLnkRouterId.setReference('OSPF Version 2, Appendix C.1 Global parameters')
if mibBuilder.loadTexts: rlOspfNetLnkRouterId.setStatus('current')
if mibBuilder.loadTexts: rlOspfNetLnkRouterId.setDescription('The 32 bit number that uniquely identifies the originating router in the Autonomous System.')
rlOspfNetLnkIdx = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 14, 5, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlOspfNetLnkIdx.setStatus('current')
if mibBuilder.loadTexts: rlOspfNetLnkIdx.setDescription('The index is an unsigned 32-bit integer. It is used as the sequence number of an entry in the LSA and is relevant only for Router or Network LSAs, which can contain an unlimited number of entries.')
rlOspfNetLnkSequence = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 14, 5, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlOspfNetLnkSequence.setReference('OSPF Version 2, Section 12.1.6 LS sequence number')
if mibBuilder.loadTexts: rlOspfNetLnkSequence.setStatus('current')
if mibBuilder.loadTexts: rlOspfNetLnkSequence.setDescription('The sequence number field is a signed 32-bit integer. It is used to detect old and duplicate link state advertisements. The space of sequence numbers is linearly ordered. The larger the sequence number the more recent the advertisement.')
rlOspfNetLnkAge = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 14, 5, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlOspfNetLnkAge.setReference('OSPF Version 2, Section 12.1.1 LS age')
if mibBuilder.loadTexts: rlOspfNetLnkAge.setStatus('current')
if mibBuilder.loadTexts: rlOspfNetLnkAge.setDescription('This field is the age of the link state advertisement in seconds.')
rlOspfNetLnkChecksum = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 14, 5, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlOspfNetLnkChecksum.setReference('OSPF Version 2, Section 12.1.7 LS checksum')
if mibBuilder.loadTexts: rlOspfNetLnkChecksum.setStatus('current')
if mibBuilder.loadTexts: rlOspfNetLnkChecksum.setDescription("This field is the checksum of the complete contents of the advertisement, excepting the age field. The age field is excepted so that an advertisement's age can be incremented without updating the checksum. The checksum used is the same that is used for ISO connectionless datagrams; it is commonly referred to as the Fletcher checksum.")
rlOspfNetLnkLength = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 14, 5, 1, 8), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlOspfNetLnkLength.setStatus('current')
if mibBuilder.loadTexts: rlOspfNetLnkLength.setDescription('The length in bytes of the LSA. This includes the 20 byte LSA header.')
rlOspfNetLnkMask = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 14, 5, 1, 9), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlOspfNetLnkMask.setStatus('current')
if mibBuilder.loadTexts: rlOspfNetLnkMask.setDescription('The IP address mask for the network.')
rlOspfNetLnkAttRouter = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 14, 5, 1, 10), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlOspfNetLnkAttRouter.setStatus('current')
if mibBuilder.loadTexts: rlOspfNetLnkAttRouter.setDescription('The Router ID of each of the routers attached to the network.')
rlOspfSumLnkTable = MibTable((1, 3, 6, 1, 4, 1, 89, 26, 14, 6), )
if mibBuilder.loadTexts: rlOspfSumLnkTable.setStatus('current')
if mibBuilder.loadTexts: rlOspfSumLnkTable.setDescription('Summary Link State Advertisement for network (Type 3).')
rlOspfSumLnkEntry = MibTableRow((1, 3, 6, 1, 4, 1, 89, 26, 14, 6, 1), ).setIndexNames((0, "RADLAN-IpRouter", "rlOspfSumLnkAreaId"), (0, "RADLAN-IpRouter", "rlOspfSumLnkLsid"), (0, "RADLAN-IpRouter", "rlOspfSumLnkRouterId"))
if mibBuilder.loadTexts: rlOspfSumLnkEntry.setStatus('current')
if mibBuilder.loadTexts: rlOspfSumLnkEntry.setDescription('A single entry from Summary LSA.')
rlOspfSumLnkAreaId = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 14, 6, 1, 1), AreaID()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlOspfSumLnkAreaId.setReference('OSPF Version 2, Appendix C.2 Area parameters')
if mibBuilder.loadTexts: rlOspfSumLnkAreaId.setStatus('current')
if mibBuilder.loadTexts: rlOspfSumLnkAreaId.setDescription('The 32 bit identifier of the Area from which the LSA was received.')
rlOspfSumLnkLsid = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 14, 6, 1, 2), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlOspfSumLnkLsid.setReference('OSPF Version 2, Section 12.1.4 Link State ID')
if mibBuilder.loadTexts: rlOspfSumLnkLsid.setStatus('current')
if mibBuilder.loadTexts: rlOspfSumLnkLsid.setDescription('The Link State ID is an LS Type Specific field containing either a Router ID or an IP Address; it identifies the piece of the routing domain that is being described by the advertisement.')
rlOspfSumLnkRouterId = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 14, 6, 1, 3), RouterID()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlOspfSumLnkRouterId.setReference('OSPF Version 2, Appendix C.1 Global parameters')
if mibBuilder.loadTexts: rlOspfSumLnkRouterId.setStatus('current')
if mibBuilder.loadTexts: rlOspfSumLnkRouterId.setDescription('The 32 bit number that uniquely identifies the originating router in the Autonomous System.')
rlOspfSumLnkSequence = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 14, 6, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlOspfSumLnkSequence.setReference('OSPF Version 2, Section 12.1.6 LS sequence number')
if mibBuilder.loadTexts: rlOspfSumLnkSequence.setStatus('current')
if mibBuilder.loadTexts: rlOspfSumLnkSequence.setDescription('The sequence number field is a signed 32-bit integer. It is used to detect old and duplicate link state advertisements. The space of sequence numbers is linearly ordered. The larger the sequence number the more recent the advertisement.')
rlOspfSumLnkAge = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 14, 6, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlOspfSumLnkAge.setReference('OSPF Version 2, Section 12.1.1 LS age')
if mibBuilder.loadTexts: rlOspfSumLnkAge.setStatus('current')
if mibBuilder.loadTexts: rlOspfSumLnkAge.setDescription('This field is the age of the link state advertisement in seconds.')
rlOspfSumLnkChecksum = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 14, 6, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlOspfSumLnkChecksum.setReference('OSPF Version 2, Section 12.1.7 LS checksum')
if mibBuilder.loadTexts: rlOspfSumLnkChecksum.setStatus('current')
if mibBuilder.loadTexts: rlOspfSumLnkChecksum.setDescription("This field is the checksum of the complete contents of the advertisement, excepting the age field. The age field is excepted so that an advertisement's age can be incremented without updating the checksum. The checksum used is the same that is used for ISO connectionless datagrams; it is commonly referred to as the Fletcher checksum.")
rlOspfSumLnkLength = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 14, 6, 1, 7), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlOspfSumLnkLength.setStatus('current')
if mibBuilder.loadTexts: rlOspfSumLnkLength.setDescription('The length in bytes of the LSA. This includes the 20 byte LSA header.')
rlOspfSumLnkMask = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 14, 6, 1, 8), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlOspfSumLnkMask.setStatus('current')
if mibBuilder.loadTexts: rlOspfSumLnkMask.setDescription("Value depends on the link's Type field.")
rlOspfSumLnkMetric = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 14, 6, 1, 9), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlOspfSumLnkMetric.setStatus('current')
if mibBuilder.loadTexts: rlOspfSumLnkMetric.setDescription('The cost of using this router link.')
rlOspfAsbLnkTable = MibTable((1, 3, 6, 1, 4, 1, 89, 26, 14, 7), )
if mibBuilder.loadTexts: rlOspfAsbLnkTable.setStatus('current')
if mibBuilder.loadTexts: rlOspfAsbLnkTable.setDescription('Summary Link State Advertisement for ASBR (Type 4).')
rlOspfAsbLnkEntry = MibTableRow((1, 3, 6, 1, 4, 1, 89, 26, 14, 7, 1), ).setIndexNames((0, "RADLAN-IpRouter", "rlOspfAsbLnkAreaId"), (0, "RADLAN-IpRouter", "rlOspfAsbLnkLsid"), (0, "RADLAN-IpRouter", "rlOspfAsbLnkRouterId"))
if mibBuilder.loadTexts: rlOspfAsbLnkEntry.setStatus('current')
if mibBuilder.loadTexts: rlOspfAsbLnkEntry.setDescription('A single entry from Summary LSA.')
rlOspfAsbLnkAreaId = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 14, 7, 1, 1), AreaID()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlOspfAsbLnkAreaId.setReference('OSPF Version 2, Appendix C.2 Area parameters')
if mibBuilder.loadTexts: rlOspfAsbLnkAreaId.setStatus('current')
if mibBuilder.loadTexts: rlOspfAsbLnkAreaId.setDescription('The 32 bit identifier of the Area from which the LSA was received.')
rlOspfAsbLnkLsid = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 14, 7, 1, 2), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlOspfAsbLnkLsid.setReference('OSPF Version 2, Section 12.1.4 Link State ID')
if mibBuilder.loadTexts: rlOspfAsbLnkLsid.setStatus('current')
if mibBuilder.loadTexts: rlOspfAsbLnkLsid.setDescription('The Link State ID is an LS Type Specific field containing either a Router ID or an IP Address; it identifies the piece of the routing domain that is being described by the advertisement.')
rlOspfAsbLnkRouterId = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 14, 7, 1, 3), RouterID()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlOspfAsbLnkRouterId.setReference('OSPF Version 2, Appendix C.1 Global parameters')
if mibBuilder.loadTexts: rlOspfAsbLnkRouterId.setStatus('current')
if mibBuilder.loadTexts: rlOspfAsbLnkRouterId.setDescription('The 32 bit number that uniquely identifies the originating router in the Autonomous System.')
rlOspfAsbLnkSequence = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 14, 7, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlOspfAsbLnkSequence.setReference('OSPF Version 2, Section 12.1.6 LS sequence number')
if mibBuilder.loadTexts: rlOspfAsbLnkSequence.setStatus('current')
if mibBuilder.loadTexts: rlOspfAsbLnkSequence.setDescription('The sequence number field is a signed 32-bit integer. It is used to detect old and duplicate link state advertisements. The space of sequence numbers is linearly ordered. The larger the sequence number the more recent the advertisement.')
rlOspfAsbLnkAge = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 14, 7, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlOspfAsbLnkAge.setReference('OSPF Version 2, Section 12.1.1 LS age')
if mibBuilder.loadTexts: rlOspfAsbLnkAge.setStatus('current')
if mibBuilder.loadTexts: rlOspfAsbLnkAge.setDescription('This field is the age of the link state advertisement in seconds.')
rlOspfAsbLnkChecksum = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 14, 7, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: |
if mibBuilder.loadTexts: rlOspfAsbLnkChecksum.setStatus('current')
if mibBuilder.loadTexts: rlOspfAsbLnkChecksum.setDescription("This field is the checksum of the complete contents of the advertisement, excepting the age field. The age field is excepted so that an advertisement's age can be incremented without updating the checksum. The checksum used is the same that is used for ISO connectionless datagrams; it is commonly referred to as the Fletcher checksum.")
rlOspfAsbLnkLength = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 14, 7, 1, 7), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlOspfAsbLnkLength.setStatus('current')
if mibBuilder.loadTexts: rlOspfAsbLnkLength.setDescription('The length in bytes of the LSA. This includes the 20 byte LSA header.')
rlOspfAsbLnkMetric = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 14, 7, 1, 8), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlOspfAsbLnkMetric.setStatus('current')
if mibBuilder.loadTexts: rlOspfAsbLnkMetric.setDescription('The cost of using this router link.')
rlOspfAseLnkTable = MibTable((1, 3, 6, 1, 4, 1, 89, 26, 14, 8), )
if mibBuilder.loadTexts: rlOspfAseLnkTable.setStatus('current')
if mibBuilder.loadTexts: rlOspfAseLnkTable.setDescription('External Link State Advertisement.')
rlOspfAseLnkEntry = MibTableRow((1, 3, 6, 1, 4, 1, 89, 26, 14, 8, 1), ).setIndexNames((0, "RADLAN-IpRouter", "rlOspfAseLnkLsid"), (0, "RADLAN-IpRouter", "rlOspfAseLnkRouterId"))
if mibBuilder.loadTexts: rlOspfAseLnkEntry.setStatus('current')
if mibBuilder.loadTexts: rlOspfAseLnkEntry.setDescription('A single entry from External LSA.')
rlOspfAseLnkLsid = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 14, 8, 1, 1), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlOspfAseLnkLsid.setReference('OSPF Version 2, Section 12.1.4 Link State ID')
if mibBuilder.loadTexts: rlOspfAseLnkLsid.setStatus('current')
if mibBuilder.loadTexts: rlOspfAseLnkLsid.setDescription('The Link State ID is an LS Type Specific field containing either a Router ID or an IP Address; it identifies the piece of the routing domain that is being described by the advertisement.')
rlOspfAseLnkRouterId = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 14, 8, 1, 2), RouterID()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlOspfAseLnkRouterId.setReference('OSPF Version 2, Appendix C.1 Global parameters')
if mibBuilder.loadTexts: rlOspfAseLnkRouterId.setStatus('current')
if mibBuilder.loadTexts: rlOspfAseLnkRouterId.setDescription('The 32 bit number that uniquely identifies the originating router in the Autonomous System.')
rlOspfAseLnkSequence = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 14, 8, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlOspfAseLnkSequence.setReference('OSPF Version 2, Section 12.1.6 LS sequence number')
if mibBuilder.loadTexts: rlOspfAseLnkSequence.setStatus('current')
if mibBuilder.loadTexts: rlOspfAseLnkSequence.setDescription('The sequence number field is a signed 32-bit integer. It is used to detect old and duplicate link state advertisements. The space of sequence numbers is linearly ordered. The larger the sequence number the more recent the advertisement.')
rlOspfAseLnkAge = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 14, 8, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlOspfAseLnkAge.setReference('OSPF Version 2, Section 12.1.1 LS age')
if mibBuilder.loadTexts: rlOspfAseLnkAge.setStatus('current')
if mibBuilder.loadTexts: rlOspfAseLnkAge.setDescription('This field is the age of the link state advertisement in seconds.')
rlOspfAseLnkChecksum = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 14, 8, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlOspfAseLnkChecksum.setReference('OSPF Version 2, Section 12.1.7 LS checksum')
if mibBuilder.loadTexts: rlOspfAseLnkChecksum.setStatus('current')
if mibBuilder.loadTexts: rlOspfAseLnkChecksum.setDescription("This field is the checksum of the complete contents of the advertisement, excepting the age field. The age field is excepted so that an advertisement's age can be incremented without updating the checksum. The checksum used is the same that is used for ISO connectionless datagrams; it is commonly referred to as the Fletcher checksum.")
rlOspfAseLnkLength = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 14, 8, 1, 6), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlOspfAseLnkLength.setStatus('current')
if mibBuilder.loadTexts: rlOspfAseLnkLength.setDescription('The length in bytes of the LSA. This includes the 20 byte LSA header.')
rlOspfAseLnkMask = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 14, 8, 1, 7), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlOspfAseLnkMask.setStatus('current')
if mibBuilder.loadTexts: rlOspfAseLnkMask.setDescription("Value depends on the link's Type field.")
rlOspfAseLnkFrwAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 14, 8, 1, 8), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlOspfAseLnkFrwAddress.setStatus('current')
if mibBuilder.loadTexts: rlOspfAseLnkFrwAddress.setDescription("Data traffic for the advertised destination will be forwarded to this address. If the Forwarding address is set to 0.0.0.0, data traffic will be forwarded instead to the LSA's originator (i.e., the responsible AS boundary router).")
rlOspfAseLnkBitE = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 14, 8, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("off", 1), ("on", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlOspfAseLnkBitE.setStatus('current')
if mibBuilder.loadTexts: rlOspfAseLnkBitE.setDescription('The type of external metric. If bit E is set, the metric specified is a Type 2 external metric.')
rlOspfAseLnkMetric = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 14, 8, 1, 10), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlOspfAseLnkMetric.setStatus('current')
if mibBuilder.loadTexts: rlOspfAseLnkMetric.setDescription('The cost of this route.')
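# --- Illustration (hypothetical, not part of the generated MIB module) -------
# How bit E changes route comparison for AS-external routes: with bit E set
# (type 2) only the external metric is compared between candidate routes;
# otherwise (type 1) the internal path cost is added to the external metric.
# Type 2 routes are additionally always considered worse than any type 1 route.
def _ase_comparable_cost(bit_e_on, internal_cost, external_metric):
    return external_metric if bit_e_on else internal_cost + external_metric
# -----------------------------------------------------------------------------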
rlOspfAseLnkTag = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 14, 8, 1, 11), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlOspfAseLnkTag.setStatus('current')
if mibBuilder.loadTexts: rlOspfAseLnkTag.setDescription('A 32-bit field attached to each external route.')
rlospfVirtIfExtTable = MibTable((1, 3, 6, 1, 4, 1, 89, 26, 14, 9), )
if mibBuilder.loadTexts: rlospfVirtIfExtTable.setStatus('current')
if mibBuilder.loadTexts: rlospfVirtIfExtTable.setDescription('The Virtual Interface Table describes the virtual links that the OSPF Process is configured to carry on.')
rlospfVirtIfExtEntry = MibTableRow((1, 3, 6, 1, 4, 1, 89, 26, 14, 9, 1), )
ospfVirtIfEntry.registerAugmentions(("RADLAN-IpRouter", "rlospfVirtIfExtEntry"))
rlospfVirtIfExtEntry.setIndexNames(*ospfVirtIfEntry.getIndexNames())
if mibBuilder.loadTexts: rlospfVirtIfExtEntry.setStatus('current')
if mibBuilder.loadTexts: rlospfVirtIfExtEntry.setDescription('The OSPF virtual interface table extension for md5 authentication')
rlospfVirtifKeyChain = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 26, 14, 9, 1, 1), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rlospfVirtifKeyChain.setStatus('current')
if mibBuilder.loadTexts: rlospfVirtifKeyChain.setDescription('Name of the key-chain which the OSPF virtual interface uses for MD5 authentication.')
mibBuilder.exportSymbols("RADLAN-IpRouter", rlOspfAsbLnkSequence=rlOspfAsbLnkSequence, rsRip2IfConfAutoSend=rsRip2IfConfAutoSend, rlIpRoutingProtPreferenceOspfInter=rlIpRoutingProtPreferenceOspfInter, rlOspfAutoInterfaceCreation=rlOspfAutoInterfaceCreation, rlOspfNetLnkAreaId=rlOspfNetLnkAreaId, ipRedundRoutersEntry=ipRedundRoutersEntry, rlOspfSumLnkSequence=rlOspfSumLnkSequence, rsIpRipFilterLclTable=rsIpRipFilterLclTable, rlRip2IfConfKeyChain=rlRip2IfConfKeyChain, rsIpRipFilterGlbStatus=rsIpRipFilterGlbStatus, rlOspfNetLnkLsid=rlOspfNetLnkLsid, rlOspfAseLnkLength=rlOspfAseLnkLength, rlIpRoutingProtPreferenceRipAggregate=rlIpRoutingProtPreferenceRipAggregate, ipLeakExtDirectToOspf=ipLeakExtDirectToOspf, rlOspfSumLnkChecksum=rlOspfSumLnkChecksum, rlIpRoutingProtPreferenceDirect=rlIpRoutingProtPreferenceDirect, rlIpRoutingProtPreferenceBgp=rlIpRoutingProtPreferenceBgp, rlospfVirtifKeyChain=rlospfVirtifKeyChain, rsIpRipFilterLclMatchBits=rsIpRipFilterLclMatchBits, rsIpRipFilterGlbEntry=rsIpRipFilterGlbEntry, rlOspfifKeyChain=rlOspfifKeyChain, ipRedundRoutersOperStatus=ipRedundRoutersOperStatus, ipRedundRoutersPollInterval=ipRedundRoutersPollInterval, rlOspfAsbLnkRouterId=rlOspfAsbLnkRouterId, rlOspfRtrLnkBitE=rlOspfRtrLnkBitE, rlOspfSumLnkAreaId=rlOspfSumLnkAreaId, rlOspfNetLnkChecksum=rlOspfNetLnkChecksum, rsIpRipFilterLclStatus=rsIpRipFilterLclStatus, rlRip2AutoInterfaceCreation=rlRip2AutoInterfaceCreation, rlOspfAseLnkEntry=rlOspfAseLnkEntry, rlOspfSumLnkTable=rlOspfSumLnkTable, rlOspfNetLnkSequence=rlOspfNetLnkSequence, rlOspfAseLnkTag=rlOspfAseLnkTag, rlOspfAsbLnkTable=rlOspfAsbLnkTable, rsRip2IfConfVirtualDis=rsRip2IfConfVirtualDis, rlOspfMibVersion=rlOspfMibVersion, rsIpRipFilterGlbNumber=rsIpRipFilterGlbNumber, rsIpRipFilterLclIpIntf=rsIpRipFilterLclIpIntf, rlOspfAseLnkFrwAddress=rlOspfAseLnkFrwAddress, rlOspfRtrLnkChecksum=rlOspfRtrLnkChecksum, rlOspfAseLnkSequence=rlOspfAseLnkSequence, rlOspfIfExtEntry=rlOspfIfExtEntry, rlOspfRtrLnkRouterId=rlOspfRtrLnkRouterId, rlOspfNetLnkAge=rlOspfNetLnkAge, ipLeakStaticToRip=ipLeakStaticToRip, rlIpRouter=rlIpRouter, rlOspfRtrLnkAreaId=rlOspfRtrLnkAreaId, rlOspfSumLnkLength=rlOspfSumLnkLength, ipRedundAdminStatus=ipRedundAdminStatus, rlOspfRtrLnkAge=rlOspfRtrLnkAge, rlOspfRtrLnkBitV=rlOspfRtrLnkBitV, rlOspfRtrLnkTable=rlOspfRtrLnkTable, ipRedundOperStatus=ipRedundOperStatus, rlIpRoutingProtPreferenceOspfExt=rlIpRoutingProtPreferenceOspfExt, rsRip2IfConfTable=rsRip2IfConfTable, rlOspfRtrLnkBitB=rlOspfRtrLnkBitB, rlOspfAsbLnkAreaId=rlOspfAsbLnkAreaId, rlIpRoutingProtPreferenceOspfReject=rlIpRoutingProtPreferenceOspfReject, rlOspfNetLnkRouterId=rlOspfNetLnkRouterId, rlOspfRtrLnkLinkID=rlOspfRtrLnkLinkID, rsIpRipFilterLclType=rsIpRipFilterLclType, rsIpRipFilterLclEntry=rsIpRipFilterLclEntry, rlOspfRtrLnkType=rlOspfRtrLnkType, ipRedundRoutersIfAddr=ipRedundRoutersIfAddr, rlOspfRtrLnkMetric=rlOspfRtrLnkMetric, ipLeakOspfToRip=ipLeakOspfToRip, rlOspfAseLnkTable=rlOspfAseLnkTable, rsIpRipFilterGlbIpAddr=rsIpRipFilterGlbIpAddr, rlOspfNetLnkEntry=rlOspfNetLnkEntry, rlOspfAsbLnkLsid=rlOspfAsbLnkLsid, rlOspfNetLnkMask=rlOspfNetLnkMask, rlOspfAseLnkMask=rlOspfAseLnkMask, rlOspfAseLnkLsid=rlOspfAseLnkLsid, rlOspfAsbLnkAge=rlOspfAsbLnkAge, rlOspfRtrLnkIdx=rlOspfRtrLnkIdx, rlOspfRtrLnkSequence=rlOspfRtrLnkSequence, rlOspfSumLnkAge=rlOspfSumLnkAge, rlOspfAsbLnkLength=rlOspfAsbLnkLength, rlospfVirtIfExtTable=rlospfVirtIfExtTable, rlOspfRtrLnkLsid=rlOspfRtrLnkLsid, rsIpRipFilterLclIpAddr=rsIpRipFilterLclIpAddr, 
rlIpRoutingProtPreferenceRipNormal=rlIpRoutingProtPreferenceRipNormal, rsIpRipFilterLclNumber=rsIpRipFilterLclNumber, rsIpRipFilterLclNetworkMaskBits=rsIpRipFilterLclNetworkMaskBits, rlOspfNetLnkTable=rlOspfNetLnkTable, rlOspfAsbLnkEntry=rlOspfAsbLnkEntry, rlOspfNetLnkIdx=rlOspfNetLnkIdx, rlOspfAsbLnkChecksum=rlOspfAsbLnkChecksum, rsRip2IfConfEntry=rsRip2IfConfEntry, PYSNMP_MODULE_ID=rlIpRouter, rlOspfAseLnkBitE=rlOspfAseLnkBitE, rsIpRipFilterLclAction=rsIpRipFilterLclAction, rlOspfSumLnkRouterId=rlOspfSumLnkRouterId, rlOspfRtrLnkLinkData=rlOspfRtrLnkLinkData, rlRip2MibVersion=rlRip2MibVersion, rlOspfSumLnkLsid=rlOspfSumLnkLsid, ipRedundRoutersTable=ipRedundRoutersTable, rlOspfSumLnkMask=rlOspfSumLnkMask, rlOspfAseLnkChecksum=rlOspfAseLnkChecksum, rlospfVirtIfExtEntry=rlospfVirtIfExtEntry, ipRedundRoutersStatus=ipRedundRoutersStatus, rsIpRipFilterGlbAction=rsIpRipFilterGlbAction, rlOspfAseLnkAge=rlOspfAseLnkAge, rlOspfAsbLnkMetric=rlOspfAsbLnkMetric, rsIpRipFilterGlbType=rsIpRipFilterGlbType, rlIpRoutingProtPreferenceStatic=rlIpRoutingProtPreferenceStatic, rlOspfAseLnkRouterId=rlOspfAseLnkRouterId, rsRip2IfConfAddress=rsRip2IfConfAddress, rsIpRipFilterGlbNetworkMaskBits=rsIpRipFilterGlbNetworkMaskBits, rlOspfRtrLnkEntry=rlOspfRtrLnkEntry, ipLeakStaticToOspf=ipLeakStaticToOspf, rlOspfRtrLnkLength=rlOspfRtrLnkLength, ipRedundRoutersMainRouterAddr=ipRedundRoutersMainRouterAddr, rlOspfNetLnkAttRouter=rlOspfNetLnkAttRouter, ipRedundRoutersTimeout=ipRedundRoutersTimeout, rsIpRipFilterGlbMatchBits=rsIpRipFilterGlbMatchBits, rlOspfSumLnkMetric=rlOspfSumLnkMetric, rlOspfRtrLnkLinks=rlOspfRtrLnkLinks, ipLeakRipToOspf=ipLeakRipToOspf, rlOspfSumLnkEntry=rlOspfSumLnkEntry, rsIpRipFilterGlbTable=rsIpRipFilterGlbTable, rlOspfNetLnkLength=rlOspfNetLnkLength, rlOspfIfExtTable=rlOspfIfExtTable, rlOspfAseLnkMetric=rlOspfAseLnkMetric)
| rlOspfAsbLnkChecksum.setReference('OSPF Version 2, Section 12.1.7 LS checksum') |
test28.js | var callbackArguments = [];
var argument1 = null;
var argument2 = null; | var argument3 = null;
var base_0 = [":,<2","nk","^","]","q","$","`"]
var r_0= undefined
try {
r_0 = base_0.some(argument1,argument2,argument3)
}
catch(e) {
r_0= "Error"
}
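// Annotation: Array.prototype.some(callbackFn, thisArg) requires a callable
// first argument; calling it with null throws a TypeError, so r_0 is set to
// "Error" (any arguments beyond thisArg are simply ignored).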
function serialize(array){
return array.map(function(a){
if (a === null || a == undefined) return a;
var name = a.constructor.name;
if (name==='Object' || name=='Boolean'|| name=='Array'||name=='Number'||name=='String')
return JSON.stringify(a);
return name;
});
}
setTimeout(function(){
require("fs").writeFileSync("./experiments/some/someRandom/test28.json",JSON.stringify({"baseObjects":serialize([base_0]),"returnObjects":serialize([r_0]),"callbackArgs":callbackArguments}))
},300) | |
redis_store.go | // Copyright (c) 2018 Uber Technologies, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package cache
import (
"fmt"
"time"
"github.com/go-redis/redis"
)
const (
	MaxRetries = 3
DialTimeout = 10 * time.Second
ReadTimeout = 10 * time.Second
WriteTimeout = 10 * time.Second
)
type redisStore struct {
cli *redis.Client
ttl time.Duration
}
// NewRedisStore returns a new instance of KVStore backed by a redis server.
// In this constructor we try to open a connection to redis. If that attempt fails
// we return an error. If it succeeds we just close that connection.
func NewRedisStore(addr string, ttl time.Duration) (KVStore, error) {
cli := redis.NewClient(&redis.Options{
Addr: addr,
		MaxRetries:   MaxRetries,
DialTimeout: DialTimeout,
ReadTimeout: ReadTimeout,
WriteTimeout: WriteTimeout,
})
if _, err := cli.Ping().Result(); err != nil {
return nil, err
}
return &redisStore{
cli: cli,
ttl: ttl,
}, nil
}
func (store *redisStore) Get(key string) (string, error) {
v, err := store.cli.Get(key).Result()
if err == redis.Nil {
return "", nil
} else if err != nil {
return "", fmt.Errorf("redis get key: %s", err)
}
	return v, nil
}
func (store *redisStore) Put(key, value string) error {
if _, err := store.cli.Set(key, value, store.ttl).Result(); err != nil {
return fmt.Errorf("redis set key: %s", err)
}
return nil
} |
func (store *redisStore) Cleanup() error { return nil } | |
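// Usage sketch (illustrative, not part of this package): the address and TTL
// below are placeholders and assume a reachable Redis server.
//
//	store, err := NewRedisStore("localhost:6379", 5*time.Minute)
//	if err != nil {
//		// redis was unreachable at construction time
//	}
//	if err := store.Put("greeting", "hello"); err != nil {
//		// handle write error
//	}
//	v, _ := store.Get("greeting") // "" when the key is missing or expired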
siScore_utils.py | import glob
import torch
import numpy as np
from skimage import io, transform
from torchvision import transforms
import torchvision.transforms.functional as F
from torch.utils.data import Dataset
from PIL import Image
import random
class ClusterDataset(Dataset):
def __init__(self, cluster_list, dir_name, transform=None):
self.file_list = []
self.transform = transform
for cluster_num in cluster_list:
self.file_list.extend(glob.glob('../data/{}/{}/*.png'.format(dir_name, cluster_num)))
def __len__(self):
return len(self.file_list)
def __getitem__(self, idx):
image = io.imread(self.file_list[idx]) / 255.0
if self.transform:
image = self.transform(np.stack([image])).squeeze()
return image
class RandomRotate(object):
|
class Normalize(object):
def __init__(self, mean, std, inplace=False):
self.mean = mean
self.std = std
self.inplace = inplace
def __call__(self, images):
normalized = np.stack([F.normalize(x, self.mean, self.std, self.inplace) for x in images])
return normalized
class Grayscale(object):
def __init__(self, prob = 1):
self.prob = prob
def __call__(self, images):
random_num = np.random.randint(100, size=1)[0]
if random_num <= self.prob * 100:
gray_images = (images[:, 0, :, :] + images[:, 1, :, :] + images[:, 2, :, :]) / 3
gray_scaled = gray_images.unsqueeze(1).repeat(1, 3, 1, 1)
return gray_scaled
else:
return images
class ToTensor(object):
def __call__(self, images):
images = images.transpose((0, 3, 1, 2))
return torch.from_numpy(images).float()
class AverageMeter(object):
def __init__(self):
self.reset()
def reset(self):
self.val = 0
self.avg = 0
self.sum = 0
self.count = 0
def update(self, val, n=1):
self.val = val
self.sum += val * n
self.count += n
self.avg = self.sum / self.count | def __call__(self, images):
rotated = np.stack([self.random_rotate(x) for x in images])
return rotated
def random_rotate(self, image):
rand_num = np.random.randint(0, 4)
if rand_num == 0:
return np.rot90(image, k=1, axes=(0, 1))
elif rand_num == 1:
return np.rot90(image, k=2, axes=(0, 1))
elif rand_num == 2:
return np.rot90(image, k=3, axes=(0, 1))
else:
return image |
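# Usage sketch (illustrative; the cluster ids, directory name and statistics
# below are placeholders, not values taken from this repository). Ordering
# matters: numpy-based transforms must run before ToTensor, tensor-based ones
# after it. Note that np.rot90 returns a negative-stride view, so an
# np.ascontiguousarray copy may be needed between RandomRotate and ToTensor.
# dataset = ClusterDataset(
#     cluster_list=[0, 1, 2], dir_name='some_dir',
#     transform=transforms.Compose([
#         RandomRotate(), ToTensor(), Grayscale(prob=0.1),
#         Normalize(mean=[0.5, 0.5, 0.5], std=[0.5, 0.5, 0.5]),
#     ]))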
leaflet-map.component.ts | import { Component, OnInit } from '@angular/core';
import { SharedService } from '../../../layouts/shared-service';
import * as L from 'leaflet';
@Component({
selector: 'page-leaflet-map',
templateUrl: './leaflet-map.component.html',
styleUrls: ['./leaflet-map.component.scss']
})
export class | implements OnInit {
pageTitle: string = 'Leaflet Map';
lat: number = 50.4664212;
lng: number = 30.6;
constructor( private _sharedService: SharedService ) {
this._sharedService.emitChange(this.pageTitle);
}
ngOnInit(){
let mymap: any = L.map('map').setView([this.lat, this.lng], 13);
let circle:any = L.circle([this.lat, this.lng], {
color: '#dc143c',
fillColor: '#dc143c',
fillOpacity: 0.2,
radius: 800
}).addTo(mymap);
L.tileLayer('https://api.mapbox.com/styles/v1/mapbox/dark-v9/tiles/256/{z}/{x}/{y}?access_token=pk.eyJ1IjoibmV4dC1pdGVtIiwiYSI6ImNqMDFlYWRqeTAyNzEyd3FuNjQxdmVvMjgifQ.Ff8pEWrzeJ3uipr78e69uw', {
maxZoom: 18,
attribution: 'Map data © <a href="http://openstreetmap.org">OpenStreetMap</a> contributors, ' +
'<a href="http://creativecommons.org/licenses/by-sa/2.0/">CC-BY-SA</a>, ' +
'Imagery © <a href="http://mapbox.com">Mapbox</a>',
id: 'mapbox.streets'
}).addTo(mymap);
}
}
| PageLeafletMapComponent |
handlers.go | /*
Copyright The Helm Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package multitenant
import (
"bytes"
"fmt"
"io"
"net/http"
"os"
pathutil "path"
"strconv"
"time"
cm_storage "github.com/chartmuseum/storage"
cm_logger "helm.sh/chartmuseum/pkg/chartmuseum/logger"
cm_repo "helm.sh/chartmuseum/pkg/repo"
"helm.sh/helm/v3/pkg/chart"
"helm.sh/helm/v3/pkg/chart/loader"
helm_repo "helm.sh/helm/v3/pkg/repo"
"github.com/gin-gonic/gin"
"go.uber.org/zap"
)
var (
objectSavedResponse = gin.H{"saved": true}
objectDeletedResponse = gin.H{"deleted": true}
healthCheckResponse = gin.H{"healthy": true}
welcomePageHTML = []byte(`<!DOCTYPE html>
<html>
<head>
<title>Welcome to ChartMuseum!</title>
<style>
body {
width: 35em;
margin: 0 auto;
font-family: Tahoma, Verdana, Arial, sans-serif;
}
</style>
</head>
<body>
<h1>Welcome to ChartMuseum!</h1>
<p>If you see this page, the ChartMuseum web server is successfully installed and
working.</p>
<p>For online documentation and support please refer to the
<a href="https://github.com/helm/chartmuseum">GitHub project</a>.<br/>
<p><em>Thank you for using ChartMuseum.</em></p>
</body>
</html>
`)
)
type (
HTTPError struct {
Status int
Message string
}
)
type (
chartOrProvenanceFile struct {
filename string
content []byte
field string // file was extracted from this form field
}
filenameFromContentFn func([]byte) (string, error)
)
func (server *MultiTenantServer) getWelcomePageHandler(c *gin.Context) {
if server.WebTemplatePath != "" {
// Check if template file exists, otherwise return default welcome page
templateFilesExist := server.CheckTemplateFilesExist(server.WebTemplatePath, server.Logger)
if templateFilesExist {
c.HTML(http.StatusOK, "index.html", nil)
} else {
server.Logger.Warnf("No template files found in %s, fallback to default welcome page", server.WebTemplatePath)
c.Data(http.StatusOK, "text/html", welcomePageHTML)
}
} else {
c.Data(http.StatusOK, "text/html", welcomePageHTML)
}
}
func (server *MultiTenantServer) getStaticFilesHandler(c *gin.Context) {
staticFolder := fmt.Sprintf("%s/static", server.WebTemplatePath)
if _, err := os.Stat(staticFolder); !os.IsNotExist(err) {
c.File(fmt.Sprintf("%s%s", server.WebTemplatePath, c.Request.URL.Path))
}
}
func (server *MultiTenantServer) getInfoHandler(c *gin.Context) {
versionResponse := gin.H{"version": server.Version}
c.JSON(200, versionResponse)
}
func (server *MultiTenantServer) getHealthCheckHandler(c *gin.Context) {
c.JSON(200, healthCheckResponse)
}
func (server *MultiTenantServer) getIndexFileRequestHandler(c *gin.Context) {
repo := c.Param("repo")
log := server.Logger.ContextLoggingFn(c)
indexFile, err := server.getIndexFile(log, repo)
if err != nil {
c.JSON(err.Status, gin.H{"error": err.Message})
return
}
indexFile.IndexLock.RLock()
defer indexFile.IndexLock.RUnlock()
c.Data(200, indexFileContentType, indexFile.Raw)
}
func (server *MultiTenantServer) getStorageObjectRequestHandler(c *gin.Context) {
repo := c.Param("repo")
filename := c.Param("filename")
log := server.Logger.ContextLoggingFn(c)
storageObject, err := server.getStorageObject(log, repo, filename)
if err != nil {
c.JSON(err.Status, gin.H{"error": err.Message})
return
}
c.Data(200, storageObject.ContentType, storageObject.Content)
}
func (server *MultiTenantServer) getStorageObjectTemplateRequestHandler(c *gin.Context) {
repo := c.Param("repo")
name := c.Param("name")
version := c.Param("version")
log := server.Logger.ContextLoggingFn(c)
fileName, err := server.getChartFileName(log, repo, name, version)
if err != nil {
c.JSON(http.StatusNotFound, gin.H{"error": err.Message})
return
}
storageObject, err := server.getStorageObject(log, repo, fileName)
if err != nil {
c.JSON(err.Status, gin.H{"error": err.Message})
return
}
chrt, err1 := loader.LoadArchive(bytes.NewReader(storageObject.Content))
if err1 != nil {
c.JSON(http.StatusInternalServerError, gin.H{"error": err1})
return
}
c.JSON(200, map[string]interface{}{
"templates": chrt.Templates,
"values": chrt.Values,
})
}
func (server *MultiTenantServer) getStorageObjectValuesRequestHandler(c *gin.Context) {
repo := c.Param("repo")
name := c.Param("name")
version := c.Param("version")
log := server.Logger.ContextLoggingFn(c)
fileName, err := server.getChartFileName(log, repo, name, version)
if err != nil {
c.JSON(http.StatusNotFound, gin.H{"error": err.Message})
return
}
storageObject, err := server.getStorageObject(log, repo, fileName)
if err != nil {
c.JSON(err.Status, gin.H{"error": err.Message})
return
}
chrt, err1 := loader.LoadArchive(bytes.NewReader(storageObject.Content))
if err1 != nil {
c.JSON(http.StatusInternalServerError, gin.H{"error": err1})
return
}
var data []byte
for _, file := range chrt.Raw {
if file.Name == "values.yaml" {
data = file.Data
break
}
}
if data == nil {
c.JSON(http.StatusNotFound, gin.H{"error": "values.yaml not found"})
return
}
c.Data(200, "application/yaml", data)
}
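// getAllChartsRequestHandler lists every chart in the repo. Pagination is
// optional via the "offset" (>= 0) and "limit" (> 0) query parameters; limit
// defaults to -1, which is presumably treated downstream as "no limit".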
func (server *MultiTenantServer) getAllChartsRequestHandler(c *gin.Context) {
repo := c.Param("repo")
offset := 0
offsetString, offsetExists := c.GetQuery("offset")
if offsetExists {
var convErr error
offset, convErr = strconv.Atoi(offsetString)
if convErr != nil || offset < 0 {
c.JSON(400, gin.H{"error": "offset is not a valid non-negative integer"})
return
}
}
limit := -1
limitString, limitExists := c.GetQuery("limit")
if limitExists {
var convErr error
limit, convErr = strconv.Atoi(limitString)
if convErr != nil || limit <= 0 {
c.JSON(400, gin.H{"error": "limit is not a valid positive integer"})
return
}
}
log := server.Logger.ContextLoggingFn(c)
allCharts, err := server.getAllCharts(log, repo, offset, limit)
if err != nil {
c.JSON(err.Status, gin.H{"error": err.Message})
return
}
c.JSON(200, allCharts)
}
func (server *MultiTenantServer) getChartRequestHandler(c *gin.Context) {
repo := c.Param("repo")
name := c.Param("name")
log := server.Logger.ContextLoggingFn(c)
chart, err := server.getChart(log, repo, name)
if err != nil {
c.JSON(err.Status, gin.H{"error": err.Message})
return
}
c.JSON(200, chart)
}
func (server *MultiTenantServer) headChartRequestHandler(c *gin.Context) {
repo := c.Param("repo")
name := c.Param("name")
log := server.Logger.ContextLoggingFn(c)
_, err := server.getChart(log, repo, name)
if err != nil {
c.Status(err.Status)
return
}
c.Status(200)
}
func (server *MultiTenantServer) getChartVersionRequestHandler(c *gin.Context) {
repo := c.Param("repo")
name := c.Param("name")
version := c.Param("version")
log := server.Logger.ContextLoggingFn(c)
chartVersion, err := server.getChartVersion(log, repo, name, version)
if err != nil {
c.JSON(err.Status, gin.H{"error": err.Message})
return
}
c.JSON(200, chartVersion)
}
func (server *MultiTenantServer) headChartVersionRequestHandler(c *gin.Context) {
repo := c.Param("repo")
name := c.Param("name")
version := c.Param("version")
log := server.Logger.ContextLoggingFn(c)
_, err := server.getChartVersion(log, repo, name, version)
if err != nil {
c.Status(err.Status)
return
}
c.Status(200)
}
func (server *MultiTenantServer) deleteChartVersionRequestHandler(c *gin.Context) {
repo := c.Param("repo")
name := c.Param("name")
version := c.Param("version")
log := server.Logger.ContextLoggingFn(c)
err := server.deleteChartVersion(log, repo, name, version)
if err != nil {
c.JSON(err.Status, gin.H{"error": err.Message})
return
}
server.emitEvent(c, repo, deleteChart, &helm_repo.ChartVersion{
Metadata: &chart.Metadata{
Name: name,
Version: version,
},
// Since we only need the name and version to delete the chart version from the index,
// leave the other fields at their defaults
})
c.JSON(200, objectDeletedResponse)
}
func (server *MultiTenantServer) postRequestHandler(c *gin.Context) {
if c.ContentType() == "multipart/form-data" {
server.postPackageAndProvenanceRequestHandler(c) // new route handling form-based chart and/or prov files
} else {
server.postPackageRequestHandler(c) // classic binary data, chart package only route
}
}
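// Example uploads (host, port, and route assume ChartMuseum's documented
// defaults; "chart" and "prov" are the default form field names):
//
//	curl --data-binary "@mychart-0.1.0.tgz" http://localhost:8080/api/charts
//	curl -F "chart=@mychart-0.1.0.tgz" -F "prov=@mychart-0.1.0.tgz.prov" http://localhost:8080/api/charts
//
// The first form is handled by postPackageRequestHandler; the multipart form
// is handled by postPackageAndProvenanceRequestHandler.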
func (server *MultiTenantServer) postPackageRequestHandler(c *gin.Context) {
repo := c.Param("repo")
content, getContentErr := c.GetRawData()
if getContentErr != nil {
if len(c.Errors) > 0 {
return // this is a "request too large"
}
c.JSON(500, gin.H{"error": fmt.Sprintf("%s", getContentErr)})
return
}
log := server.Logger.ContextLoggingFn(c)
_, force := c.GetQuery("force")
action := addChart
filename, err := server.uploadChartPackage(log, repo, content, force)
if err != nil {
// We should check both err.Status and err.Message here:
// http.StatusConflict can mean either "the chart exists and overwrite is not set" OR "the chart exists and overwrite is set";
// err.Status == http.StatusConflict by itself only tells us that the chart already exists.
if err.Status == http.StatusConflict {
if err.Message != "" {
c.JSON(err.Status, gin.H{"error": err.Message})
return
}
action = updateChart
} else {
c.JSON(err.Status, gin.H{"error": err.Message})
return
}
}
chart, chartErr := cm_repo.ChartVersionFromStorageObject(cm_storage.Object{
Path: pathutil.Join(repo, filename),
Content: content,
LastModified: time.Now()})
if chartErr != nil {
log(cm_logger.ErrorLevel, "cannot get chart from content", zap.Error(chartErr), zap.Binary("content", content))
}
server.emitEvent(c, repo, action, chart)
c.JSON(201, objectSavedResponse)
}
// TODO: decide whether the cache needs to be updated here
func (server *MultiTenantServer) postProvenanceFileRequestHandler(c *gin.Context) {
repo := c.Param("repo")
content, getContentErr := c.GetRawData()
if getContentErr != nil {
if len(c.Errors) > 0 {
return // this is a "request too large"
}
c.JSON(500, gin.H{"error": fmt.Sprintf("%s", getContentErr)})
return
}
log := server.Logger.ContextLoggingFn(c)
_, force := c.GetQuery("force")
err := server.uploadProvenanceFile(log, repo, content, force)
if err != nil {
c.JSON(err.Status, gin.H{"error": err.Message})
return
}
c.JSON(201, objectSavedResponse)
}
func (server *MultiTenantServer) postPackageAndProvenanceRequestHandler(c *gin.Context) {
log := server.Logger.ContextLoggingFn(c)
repo := c.Param("repo")
_, force := c.GetQuery("force")
var chartContent []byte
var path string
// action used to determine what operation to emit
action := addChart
cpFiles, status, err := server.getChartAndProvFiles(c.Request, repo, force)
if err != nil {
c.JSON(status, gin.H{"error": fmt.Sprintf("%s", err)})
return
}
switch status {
case http.StatusOK:
case http.StatusConflict:
if !server.AllowOverwrite && (!server.AllowForceOverwrite || !force) {
c.JSON(status, gin.H{"error": "chart already exists"}) // conflict
return
}
log(cm_logger.DebugLevel, "chart already exists, but overwrite is allowed", zap.String("repo", repo))
// update chart if chart already exists and overwrite is allowed
action = updateChart
default:
c.JSON(status, gin.H{"error": fmt.Sprintf("%s", err)})
return
}
if len(cpFiles) == 0 {
if len(c.Errors) > 0 {
return // this is a "request too large"
}
c.JSON(http.StatusBadRequest, gin.H{"error": fmt.Sprintf(
"no package or provenance file found in form fields %s and %s",
server.ChartPostFormFieldName, server.ProvPostFormFieldName),
})
return
}
// At this point the input is presumed valid; we now proceed to store it.
// Undo the transaction (delete already-stored files) if any write fails.
var storedFiles []*chartOrProvenanceFile
for _, ppf := range cpFiles {
server.Logger.Debugc(c, "Adding file to storage (form field)",
"filename", ppf.filename,
"field", ppf.field,
)
err := server.StorageBackend.PutObject(pathutil.Join(repo, ppf.filename), ppf.content)
if err == nil {
storedFiles = append(storedFiles, ppf)
} else {
// Clean up what's already been saved
for _, ppf := range storedFiles {
server.StorageBackend.DeleteObject(pathutil.Join(repo, ppf.filename))
}
c.JSON(http.StatusInternalServerError, gin.H{"error": fmt.Sprintf("%s", err)})
return
}
if ppf.field == defaultFormField {
// capture the chart content and path for the event emitted below
chartContent = ppf.content
path = pathutil.Join(repo, ppf.filename)
}
}
chart, chartErr := cm_repo.ChartVersionFromStorageObject(cm_storage.Object{
Path: path,
Content: chartContent,
LastModified: time.Now()})
if chartErr != nil {
log(cm_logger.ErrorLevel, "cannot get chart from content", zap.Error(chartErr), zap.Binary("content", chartContent))
}
server.emitEvent(c, repo, action, chart)
c.JSON(http.StatusCreated, objectSavedResponse)
}
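// getChartAndProvFiles extracts chart and provenance files from the multipart
// form, keyed by their content-derived filenames, and reports http.StatusOK
// or, when any file already exists in storage, http.StatusConflict (with no
// error, leaving the overwrite decision to the caller).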
func (server *MultiTenantServer) getChartAndProvFiles(req *http.Request, repo string, force bool) (map[string]*chartOrProvenanceFile, int, error) {
type fieldFuncPair struct {
field string
fn filenameFromContentFn
}
ffp := []fieldFuncPair{
{defaultFormField, cm_repo.ChartPackageFilenameFromContent},
{server.ChartPostFormFieldName, cm_repo.ChartPackageFilenameFromContent},
{defaultProvField, cm_repo.ProvenanceFilenameFromContent},
{server.ProvPostFormFieldName, cm_repo.ProvenanceFilenameFromContent},
}
validReturnStatusCode := http.StatusOK
cpFiles := make(map[string]*chartOrProvenanceFile)
for _, ff := range ffp {
content, err := extractContentFromRequest(req, ff.field)
if err != nil {
return nil, http.StatusInternalServerError, err
}
if content == nil {
continue
}
filename, err := ff.fn(content)
if err != nil {
return nil, http.StatusBadRequest, err
}
if _, ok := cpFiles[filename]; ok {
continue
}
// if a conflict was already detected for an earlier file, skip validation and just collect this one
if validReturnStatusCode == http.StatusConflict {
cpFiles[filename] = &chartOrProvenanceFile{filename, content, ff.field}
continue
}
// check filename
if pathutil.Base(filename) != filename {
return nil, http.StatusBadRequest, fmt.Errorf("%s is improperly formatted", filename) // Name wants to break out of current directory
}
// check existence
status, err := server.validateChartOrProv(repo, filename, force)
if err != nil {
return nil, status, err
}
// return conflict status code if the file already exists
if status == http.StatusConflict {
validReturnStatusCode = status
}
cpFiles[filename] = &chartOrProvenanceFile{filename, content, ff.field}
}
// validReturnStatusCode can be 200 or 409. Returning 409 means the chart already exists
return cpFiles, validReturnStatusCode, nil
}
func extractContentFromRequest(req *http.Request, field string) ([]byte, error) {
file, header, _ := req.FormFile(field)
if file == nil || header == nil {
return nil, nil // field is not present
}
buf := bytes.NewBuffer(nil)
_, err := io.Copy(buf, file)
if err != nil {
return nil, err // IO error
}
return buf.Bytes(), nil
}
func (server *MultiTenantServer) validateChartOrProv(repo, filename string, force bool) (int, error) {
var f string
if repo == "" {
f = filename
} else {
f = repo + "/" + filename
}
// A conflict does not mean the file is invalid;
// for example, when overwrite is allowed, the upload is still valid.
// So we just return conflict with no error and let the client decide what to do.
if _, err := server.StorageBackend.GetObject(f); err == nil {
return http.StatusConflict, nil
}
return http.StatusOK, nil
}
| {
c.JSON(err.Status, gin.H{"error": err.Message})
return
} |
page-script.js | /******/ (function(modules) { // webpackBootstrap
/******/ // The module cache
/******/ var installedModules = {};
/******/
/******/ // The require function
/******/ function __webpack_require__(moduleId) {
/******/
/******/ // Check if module is in cache
/******/ if(installedModules[moduleId]) {
/******/ return installedModules[moduleId].exports;
/******/ }
/******/ // Create a new module (and put it into the cache)
/******/ var module = installedModules[moduleId] = {
/******/ i: moduleId,
/******/ l: false,
/******/ exports: {}
/******/ };
/******/
/******/ // Execute the module function
/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);
/******/
/******/ // Flag the module as loaded
/******/ module.l = true;
/******/
/******/ // Return the exports of the module
/******/ return module.exports;
/******/ }
/******/
/******/
/******/ // expose the modules object (__webpack_modules__)
/******/ __webpack_require__.m = modules;
/******/
/******/ // expose the module cache
/******/ __webpack_require__.c = installedModules;
/******/
/******/ // define getter function for harmony exports
/******/ __webpack_require__.d = function(exports, name, getter) {
/******/ if(!__webpack_require__.o(exports, name)) {
/******/ Object.defineProperty(exports, name, { enumerable: true, get: getter });
/******/ }
/******/ };
/******/
/******/ // define __esModule on exports
/******/ __webpack_require__.r = function(exports) {
/******/ if(typeof Symbol !== 'undefined' && Symbol.toStringTag) {
/******/ Object.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });
/******/ }
/******/ Object.defineProperty(exports, '__esModule', { value: true });
/******/ };
/******/
/******/ // create a fake namespace object
/******/ // mode & 1: value is a module id, require it
/******/ // mode & 2: merge all properties of value into the ns
/******/ // mode & 4: return value when already ns object
/******/ // mode & 8|1: behave like require
/******/ __webpack_require__.t = function(value, mode) {
/******/ if(mode & 1) value = __webpack_require__(value);
/******/ if(mode & 8) return value;
/******/ if((mode & 4) && typeof value === 'object' && value && value.__esModule) return value;
/******/ var ns = Object.create(null);
/******/ __webpack_require__.r(ns);
/******/ Object.defineProperty(ns, 'default', { enumerable: true, value: value });
/******/ if(mode & 2 && typeof value != 'string') for(var key in value) __webpack_require__.d(ns, key, function(key) { return value[key]; }.bind(null, key));
/******/ return ns;
/******/ };
/******/
/******/ // getDefaultExport function for compatibility with non-harmony modules
/******/ __webpack_require__.n = function(module) {
/******/ var getter = module && module.__esModule ?
/******/ function getDefault() { return module['default']; } :
/******/ function getModuleExports() { return module; };
/******/ __webpack_require__.d(getter, 'a', getter);
/******/ return getter;
/******/ };
/******/
/******/ // Object.prototype.hasOwnProperty.call
/******/ __webpack_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); };
/******/
/******/ // __webpack_public_path__
/******/ __webpack_require__.p = "";
/******/
/******/
/******/ // Load entry module and return exports
/******/ return __webpack_require__(__webpack_require__.s = "./src/page-script.js");
/******/ })
/************************************************************************/
/******/ ({
/***/ "../couli/dist/couli.js":
/*!******************************!*\
!*** ../couli/dist/couli.js ***!
\******************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {
(function webpackUniversalModuleDefinition(root, factory) {
if(true)
module.exports = factory();
else {}
})(this, function() {
return /******/ (function(modules) { // webpackBootstrap
/******/ // The module cache
/******/ var installedModules = {};
/******/
/******/ // The require function
/******/ function __webpack_require__(moduleId) {
/******/
/******/ // Check if module is in cache
/******/ if(installedModules[moduleId]) {
/******/ return installedModules[moduleId].exports;
/******/ }
/******/ // Create a new module (and put it into the cache)
/******/ var module = installedModules[moduleId] = {
/******/ i: moduleId,
/******/ l: false,
/******/ exports: {}
/******/ };
/******/
/******/ // Execute the module function
/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);
/******/
/******/ // Flag the module as loaded
/******/ module.l = true;
/******/
/******/ // Return the exports of the module
/******/ return module.exports;
/******/ }
/******/
/******/
/******/ // expose the modules object (__webpack_modules__)
/******/ __webpack_require__.m = modules;
/******/
/******/ // expose the module cache
/******/ __webpack_require__.c = installedModules;
/******/
/******/ // define getter function for harmony exports
/******/ __webpack_require__.d = function(exports, name, getter) {
/******/ if(!__webpack_require__.o(exports, name)) {
/******/ Object.defineProperty(exports, name, { enumerable: true, get: getter });
/******/ }
/******/ };
/******/
/******/ // define __esModule on exports
/******/ __webpack_require__.r = function(exports) {
/******/ if(typeof Symbol !== 'undefined' && Symbol.toStringTag) {
/******/ Object.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });
/******/ }
/******/ Object.defineProperty(exports, '__esModule', { value: true });
/******/ };
/******/
/******/ // create a fake namespace object
/******/ // mode & 1: value is a module id, require it
/******/ // mode & 2: merge all properties of value into the ns
/******/ // mode & 4: return value when already ns object
/******/ // mode & 8|1: behave like require
/******/ __webpack_require__.t = function(value, mode) {
/******/ if(mode & 1) value = __webpack_require__(value);
/******/ if(mode & 8) return value;
/******/ if((mode & 4) && typeof value === 'object' && value && value.__esModule) return value;
/******/ var ns = Object.create(null);
/******/ __webpack_require__.r(ns);
/******/ Object.defineProperty(ns, 'default', { enumerable: true, value: value });
/******/ if(mode & 2 && typeof value != 'string') for(var key in value) __webpack_require__.d(ns, key, function(key) { return value[key]; }.bind(null, key));
/******/ return ns;
/******/ };
/******/
/******/ // getDefaultExport function for compatibility with non-harmony modules
/******/ __webpack_require__.n = function(module) {
/******/ var getter = module && module.__esModule ?
/******/ function getDefault() { return module['default']; } :
/******/ function getModuleExports() { return module; };
/******/ __webpack_require__.d(getter, 'a', getter);
/******/ return getter;
/******/ };
/******/
/******/ // Object.prototype.hasOwnProperty.call
/******/ __webpack_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); };
/******/
/******/ // __webpack_public_path__
/******/ __webpack_require__.p = "";
/******/
/******/
/******/ // Load entry module and return exports
/******/ return __webpack_require__(__webpack_require__.s = "./src/main.js");
/******/ })
/************************************************************************/
/******/ ({
/***/ "./src/Definition.js":
/*!***************************!*\
!*** ./src/Definition.js ***!
\***************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.RESERVED_BINDING_NAMES = exports.VALUE_TYPES = exports.isComponent = exports.shortenBindingId = exports.getComponentOpts = exports.define = undefined;
var _attributes = __webpack_require__(/*! ./globals/attributes */ "./src/globals/attributes.js");
var _attributes2 = _interopRequireDefault(_attributes);
var _regexp = __webpack_require__(/*! ./globals/regexp */ "./src/globals/regexp.js");
var _regexp2 = _interopRequireDefault(_regexp);
var _dom = __webpack_require__(/*! ./helpers/dom */ "./src/helpers/dom.js");
var _object = __webpack_require__(/*! ./helpers/object */ "./src/helpers/object.js");
var _copy = __webpack_require__(/*! ./helpers/copy */ "./src/helpers/copy.js");
var _copy2 = _interopRequireDefault(_copy);
var _checkers = __webpack_require__(/*! ./helpers/checkers */ "./src/helpers/checkers.js");
var _common = __webpack_require__(/*! ./helpers/common */ "./src/helpers/common.js");
var _State = __webpack_require__(/*! ./State */ "./src/State.js");
var _View = __webpack_require__(/*! ./View */ "./src/View.js");
var _ComponentRedefineError = __webpack_require__(/*! ./errors/ComponentRedefineError */ "./src/errors/ComponentRedefineError.js");
var _ComponentRedefineError2 = _interopRequireDefault(_ComponentRedefineError);
var _ScopeNameCollisionError = __webpack_require__(/*! ./errors/ScopeNameCollisionError */ "./src/errors/ScopeNameCollisionError.js");
var _ScopeNameCollisionError2 = _interopRequireDefault(_ScopeNameCollisionError);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
var VALUE_TYPES = ['style', 'class', 'value', 'attrs', 'html'];
var RESERVED_HOOKS_NAMES = ['mount', 'update', 'remove'];
var DEFAULT_HOOKS = (0, _object.toObject)(RESERVED_HOOKS_NAMES, function () {
return function () {};
});
var RESERVED_BINDING_NAMES = VALUE_TYPES.concat(['events', 'hooks', 'listItem', _attributes2.default.SELF]);
exports.define = define;
exports.getComponentOpts = getComponentOpts;
exports.shortenBindingId = shortenBindingId;
exports.isComponent = isComponent;
exports.VALUE_TYPES = VALUE_TYPES;
exports.RESERVED_BINDING_NAMES = RESERVED_BINDING_NAMES;
var COMPONENT_COUNTER = 0;
var COMPONENTS = {};
function define(name, markup, bindings, styles) {
var args = (0, _checkers.isObject)(name) ? name : { name: name, markup: markup, bindings: bindings, styles: styles };
name = (0, _common.toCamelCase)(args.name);
if (COMPONENTS[name]) {
throw new _ComponentRedefineError2.default(name);
}
var componentHTMLMarkup = (0, _dom.cloneHTMLMarkup)(args.markup);
componentHTMLMarkup.classList.add(name);
COMPONENT_COUNTER = 0;
var component = {
name: name,
state: _defineProperty({}, _attributes2.default.SELF, {}),
stateId: name,
statePath: [name],
stateNames: {},
markup: componentHTMLMarkup,
evaluate: {},
links: {},
isComponent: true,
outerNames: {},
_links: {}
};
gatherBindingsFromMarkup(componentHTMLMarkup, component);
var normalizedOptions = normalizeUserOptions(args.bindings, { state: {} });
(0, _copy2.default)(component, normalizedOptions);
prepareBindings(component, component.stateId, { statePath: [], links: {}, stateNames: component.stateNames });
(0, _View.createAndAppendStyles)(prepareStyles(args.styles, component));
return COMPONENTS[name] = component;
}
function gatherBindingsFromMarkup(componentHTMLMarkup, component) {
(0, _dom.walkNodes)(componentHTMLMarkup, function (HTMLNode) {
var bindingOpts = analyzeBinding(HTMLNode);
if (!bindingOpts) {
return;
}
var binding = createBinding(bindingOpts.name, component, HTMLNode);
if (bindingOpts.isComponent) {
setComponent(binding, bindingOpts);
HTMLNode.classList.add(_attributes2.default.PREFIX + binding.id);
}
component.state[binding.name] = binding;
binding.markup.classList.add(_attributes2.default.PREFIX + binding.id, component.name + '-' + binding.name);
if (bindingOpts.isList) {
var itemNode = HTMLNode.children[0];
modifyToListBinding(binding, itemNode);
if (isComponent(itemNode)) {
setComponent(binding.listItem, getComponentOpts(itemNode));
return -1;
}
gatherBindingsFromMarkup(itemNode, binding.listItem);
return -1;
}
});
return component;
}
function setComponent(componentBinding, subComponentOpts) {
var stateNames = Object.assign(componentBinding.stateNames, subComponentOpts.component.stateNames);
var stateId = subComponentOpts.component.name + 'x' + COMPONENT_COUNTER++;
if (subComponentOpts.stateName) {
if (stateNames[subComponentOpts.stateName]) {
throw new _ScopeNameCollisionError2.default(subComponentOpts.stateName);
}
stateId = subComponentOpts.stateName;
componentBinding.stateNames[stateId] = true;
}
(0, _copy2.default)(componentBinding, subComponentOpts.component);
return Object.assign(componentBinding, {
id: stateId,
stateId: stateId,
name: stateId,
stateName: subComponentOpts.stateName,
statePath: componentBinding.statePath.concat(stateId),
markup: subComponentOpts.component.markup.cloneNode(true),
isComponent: true,
stateNames: stateNames,
_links: Object.assign({}, subComponentOpts.links),
outerNames: Object.assign({}, subComponentOpts.revLinks)
});
}
function prepareBindings(component, currentStateId, parentComponent) {
var componentWrapper = document.createElement('div');
component.markup.setAttribute(_attributes2.default.BINDING_ID, shortenBindingId(currentStateId + _attributes2.default.STATE_DELIMITER + _attributes2.default.SELF));
componentWrapper.appendChild(component.markup);
component.statePath = parentComponent.statePath.concat(component.stateId);
component.stateNames = parentComponent.stateNames;
var subComponents = [];
(0, _object.forEach)(component.state, function (binding, bindingName) {
var oldBindingId = binding.id;
var newBindingId = currentStateId + _attributes2.default.STATE_DELIMITER + bindingName;
var shortId = shortenBindingId(newBindingId);
if (binding.markup) {
var bindingNode = componentWrapper.querySelector('.' + _attributes2.default.PREFIX + oldBindingId);
bindingNode.setAttribute(_attributes2.default.BINDING_ID, shortId);
bindingNode.classList.remove(_attributes2.default.PREFIX + oldBindingId);
bindingNode.classList.add(_attributes2.default.PREFIX + newBindingId);
}
Object.assign(binding, {
statePath: component.statePath.slice(),
hooks: Object.assign({}, DEFAULT_HOOKS, binding.hooks),
id: newBindingId,
shortId: shortId
});
prepareReactiveFuncs(binding, component);
if (binding.isList) {
binding.statePath.push(bindingName);
binding.stateNames = component.stateNames;
prepareBindings(binding.listItem, newBindingId + _attributes2.default.STATE_DELIMITER + _attributes2.default.ITEM + binding.listItem.name, binding);
return;
}
if (binding.isComponent) {
subComponents.push(binding);
}
});
setupComponentsLinks(component, parentComponent);
component.template = component.markup.cloneNode(true);
subComponents.forEach(function (subComponent) {
prepareBindings(subComponent, currentStateId + _attributes2.default.STATE_DELIMITER + subComponent.stateId, component);
});
}
function prepareReactiveFuncs(binding, componentData) {
VALUE_TYPES.forEach(function (type) {
var reactiveFunc = binding[type];
if (!reactiveFunc) {
return;
}
binding.evaluate[type] = prepareReactiveFunc(binding, type, reactiveFunc, componentData);
});
}
function prepareReactiveFunc(binding, type, reactiveFunc, componentData) {
var dependenciesNames = getDependenciesNames(reactiveFunc);
dependenciesNames.forEach(function (dependencyName) {
if ((0, _checkers.isEmpty)(componentData.state[dependencyName])) {
componentData.state[dependencyName] = createBinding(dependencyName, componentData);
}
componentData.state[dependencyName].dependants[binding.name + ':' + type] = { name: binding.name, type: type };
});
return function (values, componentInterface) {
return compute(reactiveFunc, values, componentInterface);
};
}
function getDependenciesNames(func) {
var funcParams = getParamNames(func);
var valuesObjRegExp = new RegExp('\\' + funcParams[0] + '\\.(\\D[^\\s\\W]+)', 'g');
var dependenciesNames = [];
var funcStr = func.toString();
var dependencyName = void 0;
while (dependencyName = valuesObjRegExp.exec(funcStr)) {
dependenciesNames.push(dependencyName[1]);
}
return dependenciesNames;
}
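// Illustrative example (not part of the library): for a reactive function like
//   function (values) { return values.firstName + ' ' + values.lastName; }
// the regexp above matches `values.firstName` and `values.lastName`, so this
// returns ['firstName', 'lastName'] -- dependencies are read from the source text.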
function getParamNames(func) {
var funcStrWithoutComments = func.toString().replace(_regexp2.default.STRIP_COMMENTS, '');
var paramsStr = funcStrWithoutComments.slice(funcStrWithoutComments.indexOf('(') + 1, funcStrWithoutComments.indexOf(')'));
return paramsStr.match(_regexp2.default.ARGUMENT_NAMES) || [];
}
function compute(func, valuesObj, componentInterface) {
return func.call(this, (0, _State.getOnlyValues)(valuesObj), componentInterface);
}
function setupComponentsLinks(component, parentComponent) {
Object.assign(parentComponent.links, (0, _object.map)(component.outerNames, function (k) {
return { link: k, component: component.stateId };
}));
}
function normalizeUserOptions(optionsObj, parentObj, parentKey) {
if ((0, _checkers.isFunction)(optionsObj)) {
return (0, _object.set)(parentObj, [parentKey], { class: optionsObj });
}
if ((0, _checkers.isArray)(optionsObj)) {
if (optionsObj.length > 1) {
optionsObj.forEach(function (option) {
return normalizeUserOptions(option, parentObj, parentKey);
});
return;
}
if ((0, _checkers.isFunction)(optionsObj[0])) {
return (0, _object.set)(parentObj, [parentKey], { value: optionsObj[0] });
}
if ((0, _checkers.isObject)(optionsObj[0])) {
return (0, _object.set)(parentObj, [parentKey], { events: optionsObj[0] });
}
}
(0, _object.forEach)(optionsObj, function (value, key) {
delete optionsObj[key];
var binding = createBinding(key);
if ((0, _checkers.isObject)(value)) {
parentObj['state'][key] = Object.assign(binding, value);
return;
}
parentObj['state'][key] = binding;
normalizeUserOptions(value, parentObj['state'], key);
});
return parentObj;
}
function analyzeBinding(el) {
if (isComponent(el)) {
return getComponentOpts(el);
}
if (!el.getAttribute) {
return null;
}
var name = el.getAttribute(_attributes2.default.TEMPLATE_BINDING);
el.removeAttribute(_attributes2.default.TEMPLATE_BINDING);
if (name) {
return { name: name.trim() };
}
name = el.getAttribute(_attributes2.default.TEMPLATE_LIST_BINDING);
el.removeAttribute(_attributes2.default.TEMPLATE_LIST_BINDING);
if (name) {
return { name: name.trim(), isList: true };
}
}
function isComponent(el) {
var tagName = el.tagName && el.tagName.toLowerCase();
return tagName && COMPONENTS[(0, _common.toCamelCase)(tagName)];
}
function getComponentOpts(obj) {
var name = (0, _common.toCamelCase)(obj.tagName.toLowerCase());
var component = getComponentByName(name);
var links = {};
var revLinks = {};
var stateName = null;
Array.prototype.slice.call(obj.attributes).forEach(function (attr) {
if (attr.name === _attributes2.default.STATE_NAME) {
stateName = attr.value;
return;
}
var innerLink = (0, _common.toCamelCase)(attr.name);
links[innerLink] = attr.value;
revLinks[attr.value] = innerLink;
});
return {
component: component,
links: links,
revLinks: revLinks,
stateName: stateName,
name: name,
isComponent: true
};
}
function getComponentByName(name) {
return COMPONENTS[(0, _common.toCamelCase)(name)];
}
function createBinding(name, component, el) {
var componentOpts = component ? {
id: component.stateId + _attributes2.default.STATE_DELIMITER + name,
statePath: component.statePath,
stateNames: component.stateNames,
isListItem: component.isList,
stateId: name
} : {};
var elOpts = el ? {
initValue: el.value || el.innerHTML
} : {};
return Object.assign({
name: name,
listeners: [],
markup: el,
dependants: {},
events: {},
evaluate: {},
links: {},
state: _defineProperty({}, _attributes2.default.SELF, {}),
outerNames: {},
_links: {}
}, componentOpts, elOpts);
}
function modifyToListBinding(binding, itemMarkup) {
binding.isList = true;
return Object.assign(binding, {
markup: itemMarkup,
listItem: createBinding(itemMarkup.tagName, binding, itemMarkup)
});
}
function shortenBindingId(id) {
return id.split(_attributes2.default.STATE_DELIMITER).map(function (el) {
return (0, _common.getShortName)(el);
}).join(_attributes2.default.STATE_DELIMITER);
}
function prepareStyles(styleArg, component) {
return (0, _object.mapKeys)(styleArg, function (key) {
return key.split(',').map(function (selector) {
return selector.trim();
}).map(function (selector) {
var className = '.' + component.name;
if (selector) {
var isBinding = component.state[selector.split(' ')[0].split('::')[0].split(':')[0]];
var prefix = isBinding ? className + '-' : '';
className += ' ' + prefix + selector;
}
return className;
}).join(',');
});
}
/***/ }),
/***/ "./src/EventHandler.js":
/*!*****************************!*\
!*** ./src/EventHandler.js ***!
\*****************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.setupEventHandlers = undefined;
var _attributes = __webpack_require__(/*! ./globals/attributes */ "./src/globals/attributes.js");
var _attributes2 = _interopRequireDefault(_attributes);
var _object = __webpack_require__(/*! ./helpers/object */ "./src/helpers/object.js");
var _common = __webpack_require__(/*! ./helpers/common */ "./src/helpers/common.js");
var _State = __webpack_require__(/*! ./State */ "./src/State.js");
var _StateChange = __webpack_require__(/*! ./StateChange */ "./src/StateChange.js");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
exports.setupEventHandlers = setupEventHandlers;
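// setupEventHandlers implements event delegation: handlers for each event type
// across the whole component tree are gathered into one map keyed by binding
// shortId, and a single listener per event type is attached to the root element.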
function setupEventHandlers(element) {
var eventHandlers = {};
gatherAllEventHandlers(element, eventHandlers);
var markup = element.el;
var _loop = function _loop(eventName) {
markup.addEventListener(eventName, function (e) {
return executeAllCallbacksInList(e, eventHandlers[eventName], element);
});
};
for (var eventName in eventHandlers) {
_loop(eventName);
}
}
function gatherAllEventHandlers(component, gatheredHandlers) {
gatherEventHandlers(component, gatheredHandlers);
if (component.state) {
(0, _object.forEach)(component.state, function (binding) {
return gatherAllEventHandlers(binding, gatheredHandlers);
});
}
if (component.listItem) {
gatherAllEventHandlers(component.listItem, gatheredHandlers);
}
}
function gatherEventHandlers(binding, gatheredHandlers) {
if (!binding.events) {
return;
}
for (var eventName in binding.events) {
if (!gatheredHandlers[eventName]) {
gatheredHandlers[eventName] = {};
}
gatheredHandlers[eventName][binding.shortId] = binding.events[eventName];
}
}
function executeAllCallbacksInList(e, eventHandlers, element) {
decorateEvent(e);
(0, _StateChange.startTransaction)();
var curHTMLNode = e.target;
while (curHTMLNode !== element.el.parentNode) {
var bindingId = curHTMLNode.getAttribute(_attributes2.default.BINDING_ID);
var eventHandler = eventHandlers[bindingId];
if (eventHandler) {
var indexlessStatePath = bindingId.split(_attributes2.default.STATE_DELIMITER);
indexlessStatePath.pop();
var statePathToItem = getStatePathToItem(curHTMLNode, indexlessStatePath);
var accessorToData = (0, _State.createAccessor)([element.id].concat(statePathToItem));
eventHandler.call(this, e, curHTMLNode, accessorToData, +statePathToItem.slice(-2)[0]);
if (e.propagationStopped) {
break;
}
}
curHTMLNode = curHTMLNode.parentNode;
}
(0, _StateChange.applyChanges)();
}
function decorateEvent(e) {
var stopPropagation = e.stopPropagation.bind(e);
e.stopPropagation = function () {
e.propagationStopped = true;
stopPropagation();
};
}
function getStatePathToItem(el, indexlessStatePath) {
var statePath = [];
var elementName = void 0;
while (elementName = (0, _common.getRealName)(indexlessStatePath.pop())) {
if ((0, _common.has)(elementName, _attributes2.default.ITEM)) {
elementName = elementName.slice(_attributes2.default.ITEM.length);
el = getToItemNode(el);
var idx = el.getAttribute(_attributes2.default.ITEM_INDEX);
el = el.parentNode;
statePath.unshift(idx, elementName);
continue;
}
statePath.unshift(elementName);
}
return statePath;
}
function getToItemNode(el) {
var curEl = el;
while (curEl.tagName !== 'BODY') {
if (curEl.getAttribute(_attributes2.default.ITEM_INDEX)) {
return curEl;
}
curEl = curEl.parentNode;
}
}
/***/ }),
/***/ "./src/Production.js":
/*!***************************!*\
!*** ./src/Production.js ***!
\***************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.apply = undefined;
var _dom = __webpack_require__(/*! ./helpers/dom */ "./src/helpers/dom.js");
var _Definition = __webpack_require__(/*! ./Definition */ "./src/Definition.js");
var _EventHandler = __webpack_require__(/*! ./EventHandler */ "./src/EventHandler.js");
var _State = __webpack_require__(/*! ./State */ "./src/State.js");
exports.apply = apply;
function apply(rootElementSelector) {
var rootElement = document.querySelector(rootElementSelector);
var HTMLNodes = (0, _dom.collectHTMLNodes)(rootElement, _Definition.isComponent);
HTMLNodes.forEach(function (HTMLNode) {
var componentOpts = (0, _Definition.getComponentOpts)(HTMLNode);
var element = setupElement(componentOpts);
(0, _dom.replaceNodes)(HTMLNode, element.el);
});
}
function setupElement(componentOpts) {
var element = (0, _State.createElement)(componentOpts);
(0, _EventHandler.setupEventHandlers)(element);
return element;
}
/***/ }),
/***/ "./src/State.js":
/*!**********************!*\
!*** ./src/State.js ***!
\**********************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.prepareChangeObject = exports.setState = exports.getComponent = exports.getOnlyValues = exports.createAccessor = exports.createElement = undefined;
var _attributes = __webpack_require__(/*! ./globals/attributes */ "./src/globals/attributes.js");
var _attributes2 = _interopRequireDefault(_attributes);
var _object = __webpack_require__(/*! ./helpers/object */ "./src/helpers/object.js");
var _common = __webpack_require__(/*! ./helpers/common */ "./src/helpers/common.js");
var _Definition = __webpack_require__(/*! ./Definition */ "./src/Definition.js");
var _StateChange = __webpack_require__(/*! ./StateChange */ "./src/StateChange.js");
var _checkers = __webpack_require__(/*! ./helpers/checkers */ "./src/helpers/checkers.js");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
exports.createElement = createElement;
exports.createAccessor = createAccessor;
exports.getOnlyValues = getOnlyValues;
exports.getComponent = getComponent;
exports.setState = setState;
exports.prepareChangeObject = prepareChangeObject;
var ELEMENTS = {};
var STATE = {};
var ELEMENT_COUNTER = 1;
function setState(path, value) {
(0, _object.set)(STATE, path, value);
}
function getState(path) {
return (0, _object.get)(STATE, path);
}
function createElement(componentOpts) {
var id = ELEMENT_COUNTER++;
var name = componentOpts.name;
var element = ELEMENTS[id] = {
id: id,
state: _defineProperty({}, name, componentOpts.component)
};
STATE[id] = {};
(0, _StateChange.createStateNodes)([id, name]);
(0, _StateChange.sendToRenderQueue)([id], _defineProperty({}, name, false));
(0, _StateChange.applyChanges)();
element.el = STATE[id][name][_attributes2.default.SELF].el;
element.el.setAttribute(_attributes2.default.COMPONENT_TYPE, _attributes2.default.BASE);
return element;
}
function createAccessor(path) {
var values = getState(path);
var component = getComponent(path);
var accessor = {
component: component,
values: values,
path: path,
startTransaction: _StateChange.startTransaction,
applyChanges: _StateChange.applyChanges,
up: function up(level) {
return moveUpStatePath(level, path.slice());
},
down: function down(additionalPath) {
return createAccessor(path.concat(additionalPath));
},
set: function set(changeObj) {
return (0, _StateChange.setValues)(prepareChangeObject(changeObj), path);
},
get: function get(key) {
return getValues(accessor, key, path);
},
markup: function markup(key) {
return key ? accessor.values[key].el : accessor.values[_attributes2.default.SELF].el;
}
};
if ((0, _checkers.isArray)(values)) {
modifyToArrayAccessor(accessor);
}
return accessor;
}
function prepareChangeObject(changeObj) {
return (0, _object.map)(changeObj, function (v) {
var forced = (0, _checkers.isObject)(v) && Object.keys(v)[0] === 'force';
return {
value: forced ? v['force'] : v,
type: 'value',
force: forced
};
});
}
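// State paths address nested bindings. A (hypothetical) path such as
// [elementId, 'todoApp', 'items', 2, 'item'] walks element -> component ->
// list binding -> item index -> list item; getComponent jumps to listItem on a
// list binding and advances past the numeric index and the item name.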
function getComponent(path) {
var component = ELEMENTS[path[0]];
for (var i = 1; i < path.length; i++) {
component = component.state[path[i]];
if (component.isList && i !== path.length - 1) {
component = component.listItem;
i += 2;
}
}
return component;
}
function getOnlyValues(obj) {
return (0, _object.map)(obj, function (bindingValues) {
return bindingValues['value'];
});
}
function getValues(accessor, key, statePath) {
var values = accessor.values;
var component = accessor.component;
if (!key) {
return getValuesTree(values, component, {});
}
var binding = component.state[key];
if (binding.isList || binding.isComponent) {
return createAccessor(statePath.concat(key));
}
return values[key].value;
}
function getValuesTree(values, component, valuesTree) {
(0, _object.forEach)(values, function (vals, bindingName) {
if ((0, _common.has)(_Definition.RESERVED_BINDING_NAMES, bindingName)) {
return;
}
var binding = component.state[bindingName];
if (binding.isList) {
valuesTree[bindingName] = [];
values[bindingName].forEach(function (el, i) {
var itemValues = valuesTree[bindingName][i] = {};
getValuesTree(values[bindingName][i][binding.listItem.name], binding.listItem, itemValues);
});
return;
}
if (binding.isComponent) {
var componentValues = valuesTree[bindingName] = {};
getValuesTree(values[bindingName], binding, componentValues);
}
valuesTree[component._links[bindingName] || bindingName] = vals.value;
});
return valuesTree;
}
function moveUpStatePath(level, statePath) {
var lastIdx = statePath.length;
if (level === '') {
return createAccessor(statePath.slice(0, 2));
}
if ((0, _checkers.isString)(level)) {
while (lastIdx--) {
if (statePath[lastIdx] === level) {
break;
}
}
return createAccessor(statePath.slice(0, lastIdx + 1));
}
level = level || 1;
while (lastIdx-- && level--) {
if ((0, _checkers.isNumber)(+statePath[lastIdx - 1])) {
lastIdx--;
}
}
return createAccessor(statePath.slice(0, lastIdx + 1));
}
function modifyToArrayAccessor(accessor) {
Object.assign(accessor, {
get: function get(num) {
return createAccessor(accessor.path.concat(num, accessor.component.listItem.name));
},
push: function push(els) {
return (0, _StateChange.modifyList)('add', { start: accessor.values.length, els: els }, accessor);
},
unshift: function unshift(els) {
return (0, _StateChange.modifyList)('add', { start: 0, els: els }, accessor);
},
add: function add(start, els) {
return (0, _StateChange.modifyList)('add', { start: start, els: els }, accessor);
},
pop: function pop(num) {
return (0, _StateChange.modifyList)('remove', { start: accessor.values.length - (num || 1), num: num }, accessor);
},
shift: function shift(num) {
return (0, _StateChange.modifyList)('remove', { start: 0, num: num }, accessor);
},
remove: function remove(start, num) {
return (0, _StateChange.modifyList)('remove', { start: start, num: num }, accessor);
},
length: function length() {
return accessor.values.length;
},
value: function value() {
return mapList(accessor);
},
forEach: function forEach(cb) {
return iterateListValues(accessor, cb);
},
filter: function filter(cb) {
return filterList(accessor, cb);
},
map: function map(cb) {
return mapList(accessor, cb);
}
});
}
function iterateListValues(accessor, cb) {
accessor.values.forEach(function (el, i) {
return cb(getValuesTree(el[accessor.component.listItem.name], accessor.component.listItem, {}), i);
});
}
function filterList(accessor, cb) {
var newList = [];
iterateListValues(accessor, function (el, i) {
if (cb(el, i)) {
newList.push(el);
}
});
return newList;
}
function mapList(accessor, cb) {
var newList = [];
iterateListValues(accessor, function (el, i) {
return newList.push(cb ? cb(el, i) : el);
});
return newList;
}
/***/ }),
/***/ "./src/StateChange.js":
/*!****************************!*\
!*** ./src/StateChange.js ***!
\****************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.sendToRenderQueue = exports.applyChanges = exports.startTransaction = exports.modifyList = exports.createStateNodes = exports.setValues = undefined;
var _attributes = __webpack_require__(/*! ./globals/attributes */ "./src/globals/attributes.js");
var _attributes2 = _interopRequireDefault(_attributes);
var _object = __webpack_require__(/*! ./helpers/object */ "./src/helpers/object.js");
var _checkers = __webpack_require__(/*! ./helpers/checkers */ "./src/helpers/checkers.js");
var _equality = __webpack_require__(/*! ./helpers/equality */ "./src/helpers/equality.js");
var _equality2 = _interopRequireDefault(_equality);
var _common = __webpack_require__(/*! ./helpers/common */ "./src/helpers/common.js");
var _State = __webpack_require__(/*! ./State */ "./src/State.js");
var _View = __webpack_require__(/*! ./View */ "./src/View.js");
var _Definition = __webpack_require__(/*! ./Definition */ "./src/Definition.js");
var _BindingNotExistsError = __webpack_require__(/*! ./errors/BindingNotExistsError */ "./src/errors/BindingNotExistsError.js");
var _BindingNotExistsError2 = _interopRequireDefault(_BindingNotExistsError);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
exports.setValues = setValues;
exports.createStateNodes = createStateNodes;
exports.modifyList = modifyList;
exports.startTransaction = startTransaction;
exports.applyChanges = applyChanges;
exports.sendToRenderQueue = sendToRenderQueue;
var CHANGES = { changes: {}, collecting: false };
var PROMISES_RESOLVES = [];
var LIFE_CYCLE_HANDLERS = { list: [] };
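// Changes are batched: while CHANGES.collecting is true, setValues/modifyList
// queue their DOM updates and return pending promises; applyChanges() then
// renders the queued changes, runs the collected life-cycle hooks (which may
// queue more changes, triggering another pass), and finally resolves all
// pending promises.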
function startTransaction() {
applyChanges();
CHANGES.collecting = true;
}
function applyChanges() {
var changes = CHANGES.changes;
CHANGES.changes = {};
(0, _View.renderChanges)(changes);
return runLifeCycleHooks();
}
function runLifeCycleHooks() {
var lifeCycleHandlers = LIFE_CYCLE_HANDLERS.list;
LIFE_CYCLE_HANDLERS.list = [];
lifeCycleHandlers.forEach(function (h) {
return h();
});
if (!(0, _checkers.isEmpty)(CHANGES.changes)) {
return applyChanges();
}
return finalizeTransaction();
}
function finalizeTransaction() {
CHANGES.collecting = false;
var resolvePromise = void 0;
while (resolvePromise = PROMISES_RESOLVES.pop()) {
resolvePromise(true);
}
return Promise.resolve(true);
}
function sendToRenderQueue(path, change) {
(0, _object.set)(CHANGES.changes, path, change);
}
function isCollectingChanges() {
return CHANGES.collecting;
}
function addLifeCycleHook(hookType, binding, values, accessor, idx) {
var hook = binding.hooks[hookType];
LIFE_CYCLE_HANDLERS.list.push(function () {
var el = values[binding.name || _attributes2.default.SELF].el;
var vals = !binding.name ? (0, _State.getOnlyValues)(values) : values[binding.name].value;
hook(el, vals, accessor, idx);
});
}
function createStateNodes(statePath) {
var component = (0, _State.getComponent)(statePath);
var valuesNode = createStateNode(component);
(0, _State.setState)(statePath, valuesNode);
(0, _object.forEach)(component.state, function (binding, bindingName) {
if (binding.isComponent) {
createStateNodes(statePath.concat(bindingName));
}
});
var accessor = (0, _State.createAccessor)(statePath);
addLifeCycleHook('mount', component.state[_attributes2.default.SELF], valuesNode, accessor, statePath.slice(-2)[0]);
return valuesNode;
}
function createStateNode(component) {
var valuesNodes = (0, _object.map)(component.state, function (binding) {
var valueNode = binding.isList ? [] : {};
return _Definition.VALUE_TYPES.reduce(function (a, key) {
var value = void 0;
switch (key) {
case 'value':
case 'html':
value = binding.initValue || '';
break;
default:
value = {};
}
a[key] = value;
return a;
}, valueNode);
});
return Object.assign(valuesNodes, _defineProperty({}, _attributes2.default.SELF, (0, _object.toObject)(_Definition.VALUE_TYPES, {})));
}
function setValues(changeValues, statePath, calledDependences) {
calledDependences = calledDependences || [];
var accessor = (0, _State.createAccessor)(statePath);
if (!accessor.values) {
accessor.values = createStateNodes(statePath);
var start = +statePath.slice(-2)[0];
sendToRenderQueue(statePath, _defineProperty({}, _attributes2.default.FULL_CHANGE, { add: { start: start, end: start + 1 } }));
}
(0, _object.forEach)(changeValues, function (change, bindingName) {
return setValue(bindingName, change, accessor, calledDependences);
});
addLifeCycleHook('update', accessor.component.state[_attributes2.default.SELF], accessor.values, accessor, statePath.slice(-2)[0]);
if (isCollectingChanges()) {
return new Promise(function (res) {
return PROMISES_RESOLVES.push(res);
});
}
return applyChanges();
}
function setValue(bindingName, change, accessor, calledDependences) {
var statePath = accessor.path;
var component = accessor.component;
var values = accessor.values;
var binding = component.state[bindingName];
if (!binding) {
bindingName = component.outerNames[bindingName];
binding = component.state[bindingName];
}
if (!binding) {
throw new _BindingNotExistsError2.default(bindingName, component.name, statePath);
}
if (change.type === 'value') {
if (binding.isList) {
return setValueForList(binding, change, values[bindingName], accessor);
}
if ((0, _checkers.isObject)(change.value) && binding.isComponent) {
return setValueForComponent(binding, change, component, statePath);
}
}
var equal = (0, _equality2.default)(values[bindingName][change.type], change.value);
if (!equal || !(0, _checkers.isEmpty)(equal) || change.force) {
values[bindingName][change.type] = change.value;
if (change.type !== 'value') {
return sendToRenderQueue(statePath.concat(bindingName), _defineProperty({}, change.type, false));
}
addLifeCycleHook('update', binding, values, accessor, statePath.slice(-2)[0]);
calledDependences.push(bindingName + ':' + change.type);
if (!binding.html) {
setValue(bindingName, { value: change.value, type: 'html', force: change.force }, accessor, calledDependences);
}
var link = component.links[bindingName];
if (link) {
setValue(link.link, { value: change.value, type: 'value', force: change.force }, accessor.down(link.component), []);
}
(0, _object.forEach)(binding.dependants, function (dependant, dependantKey) {
if ((0, _common.has)(calledDependences, dependantKey)) {
return;
}
var newValue = component.state[dependant.name].evaluate[dependant.type](values, accessor);
setValue(dependant.name, { value: newValue, type: dependant.type }, accessor, calledDependences);
});
}
}
function setValueForList(binding, change, arr, accessor) {
Object.keys(change.value).forEach(function (i) {
return setValues((0, _State.prepareChangeObject)(change.value[i]), accessor.path.concat(binding.name, i, binding.listItem.name));
});
var indexEquality = (0, _equality2.default)(arr, change.value);
var removedCount = 0;
(0, _object.forEach)(indexEquality, function (changeObj, idx) {
if (changeObj.remove) {
changeObj.remove = removeListItem(arr, idx - removedCount, binding, accessor);
removedCount++;
}
sendToRenderQueue(accessor.path.concat(binding.name, idx, binding.listItem.name), _defineProperty({}, _attributes2.default.FULL_CHANGE, changeObj));
});
}
function setValueForComponent(binding, change, component, statePath) {
setValues((0, _State.prepareChangeObject)(change.value), statePath.concat(binding.name));
}
function modifyList(action, args, accessor) {
var arr = accessor.values;
var listPath = accessor.path;
var itemName = accessor.component.listItem.name;
var start = args.start;
var changeObj = {};
switch (action) {
case 'add':
changeObj = addToList(arr, start, args.els, listPath, itemName);
break;
case 'remove':
var end = start + (args.num || 1);
changeObj = removeFromList(arr, start, end, accessor.component, accessor);
break;
}
(0, _object.forEach)(changeObj, function (change, idx) {
return sendToRenderQueue(listPath.concat(idx, itemName, _attributes2.default.FULL_CHANGE), changeObj[idx][_attributes2.default.FULL_CHANGE]);
});
if (isCollectingChanges()) {
return new Promise(function (res) {
return PROMISES_RESOLVES.push(res);
});
}
return applyChanges();
}
function addToList(arr, start, els, listPath, itemName) {
els = (0, _checkers.isArray)(els) ? els : [els];
els.forEach(function (el, i) {
arr.splice(i + start, 0, null);
setValues((0, _State.prepareChangeObject)(el), listPath.concat(i + start, itemName));
});
return _defineProperty({}, start, _defineProperty({}, _attributes2.default.FULL_CHANGE, { add: { start: start, end: start + els.length } }));
}
function removeFromList(arr, start, end, listBinding, accessor) {
var changeObj = {};
for (var i = start; i < end; i++) {
var removedDOMNode = removeListItem(arr, i, listBinding, accessor);
changeObj[i] = _defineProperty({}, _attributes2.default.FULL_CHANGE, { remove: removedDOMNode });
}
return changeObj;
}
function removeListItem(arr, idx, listBinding, accessor) {
var removedNode = arr.splice(idx, 1)[0][listBinding.listItem.name];
addLifeCycleHook('remove', listBinding.listItem.state[_attributes2.default.SELF], removedNode, accessor, idx);
return removedNode[_attributes2.default.SELF].el;
}
/***/ }),
/***/ "./src/View.js":
/*!*********************!*\
!*** ./src/View.js ***!
\*********************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.renderChanges = exports.createAndAppendStyles = undefined;
var _attributes = __webpack_require__(/*! ./globals/attributes */ "./src/globals/attributes.js");
var _attributes2 = _interopRequireDefault(_attributes);
var _common = __webpack_require__(/*! ./helpers/common */ "./src/helpers/common.js");
var _object = __webpack_require__(/*! ./helpers/object */ "./src/helpers/object.js");
var _checkers = __webpack_require__(/*! ./helpers/checkers */ "./src/helpers/checkers.js");
var _State = __webpack_require__(/*! ./State */ "./src/State.js");
var _dom = __webpack_require__(/*! ./helpers/dom */ "./src/helpers/dom.js");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
exports.createAndAppendStyles = createAndAppendStyles;
exports.renderChanges = renderChanges;
var render = {
html: applyMarkup,
class: applyClasses,
style: applyStyles,
attrs: applyAttributes
};
function renderChanges(changes) {
(0, _object.forEach)(changes, function (componentChanges, elementId) {
var componentId = Object.keys(componentChanges)[0];
var componentAccessor = (0, _State.createAccessor)([elementId, componentId]);
applyMarkup(componentAccessor.component, componentAccessor.values, [elementId, componentId], componentChanges[componentId]);
});
}
function applyMarkup(component, value, statePath, changes) {
if (!component.markup) {
return;
}
if (component.isList) {
return renderList(component, value, statePath, changes);
}
if (component.isComponent) {
return renderComponent(component, value, statePath, changes);
}
value = (0, _checkers.isUndefined)(value) ? '' : value;
if (component.markup.tagName === 'INPUT') {
component.markup.value = value;
return;
}
(0, _dom.rewriteToNode)(component.markup, value);
return component;
}
function renderComponent(component, value, statePath, changes) {
if (changes) {
return updateComponent(component, value, statePath, changes);
}
return drawComponent(value, statePath);
}
function drawComponent(values, statePath) {
var accessor = (0, _State.createAccessor)(statePath);
var component = accessor.component;
var itemWrapper = document.createElement('div');
var itemMarkup = component.template.cloneNode(true);
values[_attributes2.default.SELF].el = itemMarkup;
itemWrapper.appendChild(itemMarkup);
itemMarkup.setAttribute(_attributes2.default.COMPONENT_TYPE, component.isListItem ? _attributes2.default.ITEM : _attributes2.default.COMPONENT);
(0, _object.forEach)(render, function (renderFunc, renderType) {
if (component.state[_attributes2.default.SELF][renderType]) {
renderFunc({ markup: itemMarkup }, values[_attributes2.default.SELF][renderType]);
}
});
(0, _object.forEach)(component.state, function (binding, bindingName) {
if (!binding.markup) {
return;
}
var bindingNode = itemWrapper.querySelector('.' + _attributes2.default.PREFIX + binding.id);
bindingNode.classList.remove(_attributes2.default.PREFIX + binding.id);
if (binding.isComponent) {
var childStatePath = statePath.concat(bindingName);
var childMarkup = drawComponent(values[bindingName], childStatePath);
(0, _dom.replaceNodes)(bindingNode, childMarkup);
return;
}
values[bindingName].el = bindingNode;
if (binding.isList) {
renderList(binding, values[bindingName], statePath.concat(bindingName));
return;
}
(0, _object.forEach)(render, function (renderFunc, renderType) {
if (renderType === 'html' && binding.html === null || binding.markup.children.length) {
return;
}
renderFunc({ markup: bindingNode }, values[bindingName][renderType], statePath);
});
});
return itemWrapper.children[0];
}
function updateComponent(component, values, statePath, changes) {
var componentMarkup = values[_attributes2.default.SELF].el;
(0, _object.forEach)(changes, function (change, bindingName) {
if (bindingName === _attributes2.default.SELF) {
for (var changeType in change) {
render[changeType]({ markup: componentMarkup }, values[_attributes2.default.SELF][changeType]);
}
return;
}
var binding = component.state[bindingName];
var statePathToBinding = statePath.concat(bindingName);
if (binding.isComponent || binding.isList) {
applyMarkup(binding, values[bindingName], statePathToBinding, changes[bindingName]);
return;
}
var el = values[bindingName].el;
for (var _changeType in change) {
var newValue = values[bindingName][_changeType];
render[_changeType]({ markup: el }, newValue);
}
});
}
function renderList(listComponent, itemsValues, statePath, changes) {
if (changes) {
return updateList(listComponent, itemsValues, statePath, changes);
}
var listFragment = buildList(listComponent, itemsValues, statePath);
var listNode = itemsValues.el;
var parentNode = listNode.parentNode;
var nextNode = listNode.nextElementSibling;
(0, _dom.removeNode)(listNode);
var isComponent = listNode.getAttribute(_attributes2.default.COMPONENT_TYPE);
listNode.setAttribute(_attributes2.default.COMPONENT_TYPE, _attributes2.default.LIST);
if (isComponent) {
listNode.setAttribute(_attributes2.default.COMPONENT_TYPE, _attributes2.default.COMPONENT_LIST);
}
(0, _dom.emptyNode)(listNode);
listNode.appendChild(listFragment);
parentNode.insertBefore(listNode, nextNode);
}
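// buildList renders items [start, end) into a DocumentFragment. Without a
// range it covers the whole list (end defaults to length + 1, which
// Array.prototype.slice simply clamps). Hypothetical call for the first
// two items of a 'todos' list:
//   buildList(listBinding, items, ['app', 'todos'], { start: 0, end: 2 });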
function buildList(listComponent, itemsValues, statePath, range) {
var start = range ? range.start : 0;
var end = range ? range.end : itemsValues.length + 1;
var listFragment = document.createDocumentFragment();
itemsValues.slice(start, end).forEach(function (itemValue, itemIndex) {
var newItemIndex = +start + itemIndex;
var statePathToItem = statePath.concat([newItemIndex, listComponent.listItem.name]);
var listItemMarkup = drawComponent(itemValue[listComponent.listItem.name], statePathToItem);
listItemMarkup.setAttribute(_attributes2.default.ITEM_INDEX, newItemIndex);
listFragment.appendChild(listItemMarkup);
});
return listFragment;
}
function updateList(listBinding, itemsValues, statePath, changes) {
var listNode = itemsValues.el;
var itemName = listBinding.listItem.name;
var itemsChanges = (0, _object.splitPiece)(changes, function (v, k) {
return (0, _checkers.isNumber)(+k);
});
var removedCount = 0;
var end = 0;
for (var changeType in changes) {
render[changeType]({ markup: listNode }, itemsValues[changeType]);
}
(0, _object.forEach)(itemsChanges, function (change, i) {
change = change[itemName];
var statePathToItem = statePath.concat(i, itemName);
var itemAccessor = (0, _State.createAccessor)(statePathToItem);
if (!change[_attributes2.default.FULL_CHANGE]) {
updateComponent(itemAccessor.component, itemsValues[i][itemName], statePathToItem, change);
return;
}
if (change[_attributes2.default.FULL_CHANGE].add && i >= end) {
var start = change[_attributes2.default.FULL_CHANGE].add.start;
end = change[_attributes2.default.FULL_CHANGE].add.end;
fixIndexes(listNode, start, end - start);
var listFragment = buildList(listBinding, itemsValues, statePath, { start: start, end: end });
if (end === itemsValues.length) {
listNode.appendChild(listFragment);
return;
}
listNode.insertBefore(listFragment, listNode.children[start]);
return;
}
if (change[_attributes2.default.FULL_CHANGE].remove) {
(0, _dom.removeNode)(change[_attributes2.default.FULL_CHANGE].remove);
fixIndexes(listNode, i - removedCount++, -1);
}
});
}
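// fixIndexes shifts the data-x-dx index attribute of every item node from
// `start` onward by `diff`, keeping DOM indexes in sync after insertions
// (+count) and removals (-1). E.g. removing item 2 of five triggers
// fixIndexes(listNode, 2, -1), renumbering items 3 and 4 to 2 and 3.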
function fixIndexes(listNode, start, diff) {
var itemsNodes = Array.prototype.slice.call(listNode.children, start);
itemsNodes.forEach(function (itemNode) {
var prevIdx = itemNode.getAttribute(_attributes2.default.ITEM_INDEX);
var newIdx = +prevIdx + diff;
itemNode.setAttribute(_attributes2.default.ITEM_INDEX, newIdx);
});
}
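// The apply* helpers below are the binding-level render handlers
// (presumably registered in the `render` map used above): attributes are
// assigned straight onto the node, classes toggle on truthiness, and
// styles are normalized first (bare numbers get a 'px' suffix, so
// { width: 10 } becomes { width: '10px' }). createAndAppendStyles turns a
// { selector: styles } map into a <style> tag, e.g.
// { '.box': { marginTop: 4 } } yields '.box{margin-top:4px;}'.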
function applyAttributes(binding, attributes) {
Object.assign(binding.markup, attributes);
}
function applyClasses(binding, classes) {
for (var className in classes) {
if (className) {
if (classes[className]) {
binding.markup.classList.add(className);
} else {
binding.markup.classList.remove(className);
}
}
}
}
function applyStyles(binding, styles) {
Object.assign(binding.markup.style, normalizeStyles(styles));
}
function createAndAppendStyles(styleObj) {
var styleStr = '';
for (var selector in styleObj) {
var styles = styleObj[selector];
styleStr += selector + '{';
normalizeStyles(styles);
for (var attr in styles) {
var value = styles[attr];
styleStr += (0, _common.toDashCase)(attr) + ':' + value + ';';
}
styleStr += '}\n';
}
var styleEl = document.createElement('style');
styleEl.appendChild(document.createTextNode(styleStr));
document.head.appendChild(styleEl);
}
function normalizeStyles(styles) {
for (var attr in styles) {
if ((0, _checkers.isNumber)(styles[attr])) {
styles[attr] = styles[attr] + 'px';
}
}
return styles;
}
/***/ }),
/***/ "./src/errors/BindingNotExistsError.js":
/*!*********************************************!*\
!*** ./src/errors/BindingNotExistsError.js ***!
\*********************************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; }
function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
var BindingNotExistsError = function (_Error) {
_inherits(BindingNotExistsError, _Error);
function BindingNotExistsError(bindingName, componentName, path) {
_classCallCheck(this, BindingNotExistsError);
var _this = _possibleConstructorReturn(this, (BindingNotExistsError.__proto__ || Object.getPrototypeOf(BindingNotExistsError)).call(this));
_this.message = "Unable to access '" + bindingName + "' binding on '" + componentName + "' component on path (" + path.join(' -> ') + ") because it doesn't exist.";
return _this;
}
return BindingNotExistsError;
}(Error);
exports.default = BindingNotExistsError;
/***/ }),
/***/ "./src/errors/ComponentRedefineError.js":
/*!**********************************************!*\
!*** ./src/errors/ComponentRedefineError.js ***!
\**********************************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; }
function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
var ComponentRedefineError = function (_Error) {
_inherits(ComponentRedefineError, _Error);
function ComponentRedefineError(name) {
_classCallCheck(this, ComponentRedefineError);
var _this = _possibleConstructorReturn(this, (ComponentRedefineError.__proto__ || Object.getPrototypeOf(ComponentRedefineError)).call(this));
_this.message = "Trying to redefine existing component: '" + name + "'";
return _this;
}
return ComponentRedefineError;
}(Error);
exports.default = ComponentRedefineError;
/***/ }),
/***/ "./src/errors/ScopeNameCollisionError.js":
/*!***********************************************!*\
!*** ./src/errors/ScopeNameCollisionError.js ***!
\***********************************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; }
function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
var ScopeNameCollisionError = function (_Error) {
_inherits(ScopeNameCollisionError, _Error);
function ScopeNameCollisionError(name) {
_classCallCheck(this, ScopeNameCollisionError);
var _this = _possibleConstructorReturn(this, (ScopeNameCollisionError.__proto__ || Object.getPrototypeOf(ScopeNameCollisionError)).call(this));
_this.message = "Trying to assign a name '" + name + "' to a state that already exists in the chain.";
return _this;
}
return ScopeNameCollisionError;
}(Error);
exports.default = ScopeNameCollisionError;
/***/ }),
/***/ "./src/globals/attributes.js":
/*!***********************************!*\
!*** ./src/globals/attributes.js ***!
\***********************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
var LIB_NAME = 'x';
var ATTR_PREFIX = 'data-';
var PREFIX = LIB_NAME + '-';
var TEMPLATE_BINDING = PREFIX + 'b';
var TEMPLATE_LIST_BINDING = PREFIX + 'lb';
var BINDING_ID = ATTR_PREFIX + PREFIX + 'id';
var STATE_PATH = PREFIX + 'spath';
var TEMPLATE_PLACEMENT = PREFIX + 'el';
var STATE_NAME = PREFIX + 'state-name';
var SCOPE_PREFIX = 's';
var ITEM_INDEX = ATTR_PREFIX + PREFIX + 'dx';
var COMPONENT_TYPE = ATTR_PREFIX + PREFIX + 'tp';
var ITEM_SUFFIX = 'i';
var STATE_DELIMITER = '-';
var SELF = '';
var FULL_CHANGE = ' _full_change_ ';
var BASE = '1';
var COMPONENT = '2';
var LIST = '3';
var ITEM = '_item_';
var COMPONENT_LIST = '5';
exports.default = {
PREFIX: PREFIX,
TEMPLATE_BINDING: TEMPLATE_BINDING,
TEMPLATE_LIST_BINDING: TEMPLATE_LIST_BINDING,
BINDING_ID: BINDING_ID,
STATE_PATH: STATE_PATH,
TEMPLATE_PLACEMENT: TEMPLATE_PLACEMENT,
STATE_NAME: STATE_NAME,
SCOPE_PREFIX: SCOPE_PREFIX,
ITEM_INDEX: ITEM_INDEX,
COMPONENT_TYPE: COMPONENT_TYPE,
ITEM_SUFFIX: ITEM_SUFFIX,
STATE_DELIMITER: STATE_DELIMITER,
BASE: BASE,
COMPONENT: COMPONENT,
LIST: LIST,
ITEM: ITEM,
SELF: SELF,
FULL_CHANGE: FULL_CHANGE,
COMPONENT_LIST: COMPONENT_LIST
};
/***/ }),
/***/ "./src/globals/regexp.js":
/*!*******************************!*\
!*** ./src/globals/regexp.js ***!
\*******************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
var STRIP_COMMENTS = /(\/\/.*$)|(\/\*[\s\S]*?\*\/)|(\s*=[^,)]*(('(?:\\'|[^'\r\n])*')|("(?:\\"|[^"\r\n])*"))|(\s*=[^,)]*))/mg;
var ARGUMENT_NAMES = /([^\s,]+)/g;
exports.default = {
STRIP_COMMENTS: STRIP_COMMENTS,
ARGUMENT_NAMES: ARGUMENT_NAMES
};
/***/ }),
/***/ "./src/helpers/checkers.js":
/*!*********************************!*\
!*** ./src/helpers/checkers.js ***!
\*********************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.isFunction = isFunction;
exports.isArray = isArray;
exports.isObject = isObject;
exports.isObjectInBrackets = isObjectInBrackets;
exports.isObjectInDoubleBrackets = isObjectInDoubleBrackets;
exports.isString = isString;
exports.isHTMLString = isHTMLString;
exports.isNumber = isNumber;
exports.isDOMElement = isDOMElement;
exports.isUndefined = isUndefined;
exports.isEmpty = isEmpty;
exports.isLink = isLink;
function isFunction(obj) {
return getObjectType(obj) === '[object Function]';
}
function isArray(obj) {
return getObjectType(obj) === '[object Array]';
}
function isObject(obj) {
return getObjectType(obj) === '[object Object]';
}
function isObjectInBrackets(obj) {
return isArray(obj) && obj.length === 1 && isObject(obj[0]);
}
function isObjectInDoubleBrackets(obj) {
return isArray(obj) && obj.length === 1 && isObjectInBrackets(obj[0]);
}
function isString(obj) {
return getObjectType(obj) === '[object String]';
}
function isHTMLString(obj) {
return isString(obj) && obj.indexOf('<') === 0;
}
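// Note: the `obj === obj` check below is false only for NaN, so isNumber
// rejects NaN while still matching every other number value.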
function isNumber(obj) {
return getObjectType(obj) === '[object Number]' && obj === obj;
}
function isDOMElement(obj) {
return obj && typeof obj.tagName !== 'undefined';
}
function isUndefined(obj) {
return typeof obj === 'undefined';
}
function getObjectType(obj) {
return Object.prototype.toString.call(obj);
}
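// isLink treats a string as a link when its first '/' appears within the
// first three characters, covering forms like '/path', './path',
// '../path' and '//host'.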
function isLink(obj) {
var slashIdx = obj.indexOf('/');
return [0, 1, 2].some(function (idx) {
return idx === slashIdx;
});
}
function isEmpty(obj) {
if (!obj) {
return true;
}
if (isArray(obj) || isString(obj)) {
return !obj.length;
}
return !Object.keys(obj).length;
}
/***/ }),
/***/ "./src/helpers/common.js":
/*!*******************************!*\
!*** ./src/helpers/common.js ***!
\*******************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.getRealName = exports.getShortName = exports.toCamelCase = exports.toDashCase = exports.getFalsePaths = exports.has = undefined;
var _checkers = __webpack_require__(/*! ./checkers */ "./src/helpers/checkers.js");
var _object = __webpack_require__(/*! ./object */ "./src/helpers/object.js");
exports.has = has;
exports.getFalsePaths = getFalsePaths;
exports.toDashCase = toDashCase;
exports.toCamelCase = toCamelCase;
exports.getShortName = getShortName;
exports.getRealName = getRealName;
var NAMES = {
real: {},
short: []
};
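// getShortName/getRealName implement simple string interning: each
// distinct name is stored in NAMES.short once and addressed by its index
// thereafter. Illustration (indexes depend on call order):
//   getShortName('title'); // -> 0 on the first ever call
//   getRealName(0);        // -> 'title'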
function getShortName(name) {
if (NAMES.real[name]) {
return NAMES.real[name];
}
var newLength = NAMES.short.push(name);
return NAMES.real[name] = newLength - 1;
}
function getRealName(num) {
return NAMES.short[num];
}
function has(obj, el) {
if ((0, _checkers.isObject)(obj)) {
var keys = Object.keys(obj);
return has(keys, el);
}
return obj.indexOf(el) >= 0;
}
function getFalsePaths(obj) {
var onlyFalsePaths = {};
getFalsePath(obj, onlyFalsePaths, []);
return onlyFalsePaths;
}
function getFalsePath(obj, onlyFalsePaths, path) {
for (var key in obj) {
if ((0, _checkers.isObject)(obj[key])) {
getFalsePath(obj[key], onlyFalsePaths, path.concat(key));
continue;
}
if (obj[key] !== true) {
(0, _object.set)(onlyFalsePaths, path.concat(key), obj[key]);
}
}
}
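// The two case converters below are inverses for simple identifiers:
//   toDashCase('backgroundColor');  // -> 'background-color'
//   toCamelCase('background-color'); // -> 'backgroundColor'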
function toDashCase(str) {
return str.replace(/([A-Z])/g, function (match) {
return '-' + match[0].toLowerCase();
});
}
function toCamelCase(str) {
return str.replace(/-(.)/g, function (match) {
return match[1].toUpperCase();
});
}
/***/ }),
/***/ "./src/helpers/copy.js":
/*!*****************************!*\
!*** ./src/helpers/copy.js ***!
\*****************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
var _checkers = __webpack_require__(/*! ./checkers */ "./src/helpers/checkers.js");
exports.default = copy;
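// copy is a deep merge rather than a plain clone: nested objects and
// arrays are merged key by key, DOM elements are cloned via
// cloneNode(true), and undefined source values are skipped. Sketch:
//   copy({ a: { x: 1 } }, { a: { y: 2 } }); // -> { a: { x: 1, y: 2 } }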
function copy(destination, source) {
if (!destination) {
return copy({}, source);
}
for (var key in source) {
if ((0, _checkers.isUndefined)(source[key])) {
continue;
}
if (source.hasOwnProperty(key) && (0, _checkers.isObject)(source[key])) {
if (!destination[key]) {
destination[key] = {};
}
copy(destination[key], source[key]);
continue;
}
if ((0, _checkers.isArray)(source[key])) {
if (!destination[key]) {
destination[key] = [];
}
copyArray(destination[key], source[key]);
continue;
}
if ((0, _checkers.isDOMElement)(source[key])) {
destination[key] = source[key].cloneNode(true);
continue;
}
destination[key] = source[key];
}
return destination;
}
function copyArray(destination, source) {
for (var i = 0; i < source.length; i++) {
if ((0, _checkers.isObject)(source[i])) {
destination[i] = destination[i] || {};
copy(destination[i], source[i]);
continue;
}
if ((0, _checkers.isArray)(source[i])) {
destination[i] = destination[i] || [];
copyArray(destination[i], source[i]);
continue;
}
destination[i] = source[i];
}
return destination;
}
/***/ }),
/***/ "./src/helpers/dom.js":
/*!****************************!*\
!*** ./src/helpers/dom.js ***!
\****************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.emptyNode = exports.rewriteToNode = exports.insertBeforeNode = exports.removeNode = exports.walkNodes = exports.collectHTMLNodes = exports.cloneHTMLMarkup = exports.replaceNodes = undefined;
var _checkers = __webpack_require__(/*! ./checkers */ "./src/helpers/checkers.js");
exports.replaceNodes = replaceNodes;
exports.cloneHTMLMarkup = cloneHTMLMarkup;
exports.collectHTMLNodes = collectHTMLNodes;
exports.walkNodes = walkNodes;
exports.removeNode = removeNode;
exports.insertBeforeNode = insertBeforeNode;
exports.rewriteToNode = rewriteToNode;
exports.emptyNode = emptyNode;
function replaceNodes(original, replacement) {
original.parentNode.replaceChild(replacement, original);
return replacement;
}
function insertBeforeNode(el, nextEl) {
nextEl.parentNode.insertBefore(el, nextEl);
}
function cloneHTMLMarkup(markup) {
var markupStr = (0, _checkers.isHTMLString)(markup.trim()) ? markup : document.querySelector(markup).innerHTML;
return convertStringToHTML(markupStr);
}
function convertStringToHTML(markupString) {
var parser = new DOMParser();
var parsedDocument = parser.parseFromString(markupString, 'text/html');
return parsedDocument.body.firstElementChild;
}
function walkNodes(node, cb) {
if (cb(node) === -1) {
return;
}
Array.prototype.slice.call(node.children).forEach(function (el) {
return walkNodes(el, cb);
});
}
function collectHTMLNodes(root, isWanted) {
var nodes = [];
walkNodes(root, function (el) {
if (isWanted(el)) { nodes.push(el); }
});
return nodes;
}
function removeNode(node) {
node.parentNode.removeChild(node);
}
function rewriteToNode(node, text) {
emptyNode(node);
writeToNode(node, text);
}
function writeToNode(node, text) {
var textNode = document.createTextNode(text);
node.appendChild(textNode);
}
function emptyNode(node) {
while (node.hasChildNodes()) {
node.removeChild(node.firstChild);
}
}
/***/ }),
/***/ "./src/helpers/equality.js":
/*!*********************************!*\
!*** ./src/helpers/equality.js ***!
\*********************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
var _typeof = typeof Symbol === "function" && typeof Symbol.iterator === "symbol" ? function (obj) { return typeof obj; } : function (obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; };
var _checkers = __webpack_require__(/*! ./checkers */ "./src/helpers/checkers.js");
exports.default = areEqual;
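// areEqual is really a shallow diff: primitives compare with ===, while
// objects and arrays yield a change tree instead of a boolean.
// Illustrative shapes:
//   areEqual({ a: 1 }, { a: 2 }); // -> { a: false }
//   areEqual([1], [1, 2]);        // -> { '1': { add: true } }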
function areEqual(first, second) {
if ((0, _checkers.isUndefined)(first) || (0, _checkers.isUndefined)(second)) {
return false;
}
if ((typeof first === 'undefined' ? 'undefined' : _typeof(first)) !== (typeof second === 'undefined' ? 'undefined' : _typeof(second))) {
return false;
}
var parentNode = { childNode: {} };
if ((0, _checkers.isArray)(second)) {
checkKeysEquality(first ? first.map(function (el) {
return el;
}) : [], second.map(function (el) {
return el;
}), parentNode);
} else if ((0, _checkers.isObject)(second)) {
checkKeysEquality(first, second, parentNode);
} else {
return first === second;
}
return parentNode.childNode;
}
function checkKeysEquality(first, second, parentNode) {
for (var key in second) {
if ((0, _checkers.isArray)(first)) {
if ((0, _checkers.isUndefined)(first[key])) {
parentNode.childNode[key] = { add: true };
}
continue;
}
if (first[key] !== second[key]) {
parentNode.childNode[key] = false;
}
}
for (var _key in first) {
if ((0, _checkers.isArray)(first)) {
if ((0, _checkers.isUndefined)(second[_key])) {
parentNode.childNode[_key] = { remove: true };
}
continue;
}
if (first[_key] !== second[_key]) {
parentNode.childNode[_key] = false;
}
}
return parentNode;
}
/***/ }),
/***/ "./src/helpers/object.js":
/*!*******************************!*\
!*** ./src/helpers/object.js ***!
\*******************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.splitPiece = exports.fullMap = exports.toObject = exports.filter = exports.mapKeys = exports.map = exports.forEach = exports.addConstFields = exports.remove = exports.set = exports.get = undefined;
var _checkers = __webpack_require__(/*! ./checkers */ "./src/helpers/checkers.js");
exports.get = get;
exports.set = set;
exports.remove = remove;
exports.addConstFields = addConstFields;
exports.forEach = forEach;
exports.map = map;
exports.mapKeys = mapKeys;
exports.filter = filter;
exports.toObject = toObject;
exports.fullMap = fullMap;
exports.splitPiece = splitPiece;
function get(obj, path) {
var value = obj;
for (var i = 0; i < path.length; i++) {
try {
value = value[path[i]];
} catch (e) {
return value;
}
}
return value;
}
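// set builds intermediate objects along the path as needed and merges
// object values instead of replacing them:
//   var o = {};
//   set(o, ['a', 'b'], 1);   // o -> { a: { b: 1 } }
//   set(o, ['a'], { c: 2 }); // o -> { a: { b: 1, c: 2 } }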
function set(obj, path, value) {
if (!path.length) {
if ((0, _checkers.isObject)(value)) {
return Object.assign(obj, value);
}
return obj = value;
}
var dest = obj;
for (var i = 0; i < path.length - 1; i++) {
if (!dest[path[i]]) {
dest = dest[path[i]] = {};
} else {
dest = dest[path[i]];
}
}
if ((0, _checkers.isObject)(value)) {
dest[path[i]] = dest[path[i]] || {};
Object.assign(dest[path[i]], value);
} else {
dest[path[i]] = value;
}
return obj;
}
function remove(obj, path) {
var value = obj;
for (var i = 0; i < path.length - 1; i++) {
value = value[path[i]];
}
delete value[path[i]];
}
function addConstFields(obj, fields) {
var _loop = function _loop(field) {
Object.defineProperty(obj, field, {
get: function get() {
return fields[field];
},
set: function set() {
throw new Error("Trying to redefine const field '" + field + "'");
}
});
};
for (var field in fields) {
_loop(field);
}
}
function forEach(obj, cb) {
for (var key in obj) {
var prop = obj[key];
cb(prop, key, obj);
}
}
function map(obj, cb) {
var newObj = {};
for (var key in obj) {
var prop = obj[key];
newObj[key] = cb(prop, key, obj);
}
return newObj;
}
function mapKeys(obj, cb) {
var newObj = {};
for (var key in obj) {
var prop = obj[key];
var newKey = cb(key, prop, obj);
newObj[newKey] = prop;
}
return newObj;
}
function splitPiece(obj, cb) {
var newObj = {};
for (var key in obj) {
var prop = obj[key];
if (cb(prop, key, obj)) {
newObj[key] = prop;
delete obj[key];
}
}
return newObj;
}
function fullMap(obj, cb) {
var newObj = {};
for (var key in obj) {
var prop = obj[key];
var res = cb(key, prop, obj);
newObj[res[0] || res.key || res.k] = res[1] || res.value || res.v;
}
return newObj;
}
function filter(obj, cb) {
var newObj = {};
for (var key in obj) {
var prop = obj[key];
if (cb(prop, key, obj)) {
newObj[key] = prop;
}
}
return newObj;
}
function toObject(arr, val) {
var newObj = {};
arr.forEach(function (v) {
newObj[v] = (0, _checkers.isFunction)(val) ? val(v) : val;
});
return newObj;
}
/***/ }),
/***/ "./src/main.js":
/*!*********************!*\
!*** ./src/main.js ***!
\*********************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {
"use strict";
var _Definition = __webpack_require__(/*! ./Definition */ "./src/Definition.js");
var _Production = __webpack_require__(/*! ./Production */ "./src/Production.js");
module.exports = {
define: _Definition.define,
apply: _Production.apply
};
/***/ })
/******/ });
});
/***/ }),
/***/ "./src/common/constants.js":
/*!*********************************!*\
!*** ./src/common/constants.js ***!
\*********************************/
/*! exports provided: EXTENSION_ID, FIND_HELPER_CLASS, COLORS, FIND_SUGGESTION, KEYBOARD_KEYS */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "EXTENSION_ID", function() { return EXTENSION_ID; });
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "FIND_HELPER_CLASS", function() { return FIND_HELPER_CLASS; });
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "COLORS", function() { return COLORS; });
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "FIND_SUGGESTION", function() { return FIND_SUGGESTION; });
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "KEYBOARD_KEYS", function() { return KEYBOARD_KEYS; });
const EXTENSION_ID = 'multi-word-searcher-webextension-unique-id';
const FIND_HELPER_CLASS = EXTENSION_ID + '-find-helper';
const COLORS = [
'255, 255, 25',
'60, 180, 75',
'230, 25, 75',
'0, 130, 200',
'245, 130, 48',
'145, 30, 180',
'240, 50, 230',
'128, 128, 128',
'210, 245, 60',
'250, 190, 190',
];
const FIND_SUGGESTION = 'Find text on page';
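// DIGIT_KEYS maps each digit to its keyCode (0 -> 48 ... 9 -> 57) so the
// keyup handler can jump straight to a numbered search slot via
// KEYBOARD_KEYS.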
const DIGIT_KEYS = Array.from({ length: 10 }).reduce((acc, _, i) => {
acc[i] = 48 + i;
return acc;
}, {});
const KEYBOARD_KEYS = {
...DIGIT_KEYS,
ESC: 27,
ENTER: 13,
SHIFT: 16,
CTRL: 17,
ALT: 18,
DELETE: 46,
BACKSPACE: 8,
w: 87,
e: 69,
r: 82,
a: 65,
s: 83,
d: 68,
f: 70,
c: 67,
b: 66,
UP: 38,
LEFT: 37,
DOWN: 40,
RIGHT: 39,
};
/***/ }),
/***/ "./src/common/interaction.js":
/*!***********************************!*\
!*** ./src/common/interaction.js ***!
\***********************************/
/*! exports provided: getCurrentTab, sendMessage, onMessage */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "getCurrentTab", function() { return getCurrentTab; });
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "sendMessage", function() { return sendMessage; });
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "onMessage", function() { return onMessage; });
const PROMISES_RESOLVES = {};
function getCurrentTab () {
return browser.tabs.query({ active: true, currentWindow: true });
}
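// sendMessage/onMessage form a small request-response protocol on top of
// one-shot runtime messages: the sender parks a Promise resolver under a
// generated callbackId, the receiver runs the matching action and echoes
// the callbackId back with isAnswer: true, which resolves the parked
// promise. Sketch:
//   sendMessage('find', { string: 'foo' }).then((answer) => { /* ... */ });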
function sendMessage (action, payload) {
const { callbackId, promise } = payload.callbackId ? { callbackId: payload.callbackId } : saveCallback(action);
const message = { action, callbackId, ...payload };
if (window.__IS_BACKGROUND_SCRIPT__) {
getCurrentTab().then((tab) => browser.tabs.sendMessage(tab[0].id, message));
} else {
browser.runtime.sendMessage(message);
}
return promise;
}
async function onMessage (message, actions = {}) {
if (message.isAnswer) {
PROMISES_RESOLVES[message.callbackId](message);
delete PROMISES_RESOLVES[message.callbackId];
return;
}
if (actions[message.action]) {
const result = await actions[ message.action ](message);
if (message.callbackId) {
sendMessage(message.action, { callbackId: message.callbackId, isAnswer: true, ...result });
}
}
}
function saveCallback (action) {
const callbackId = Date.now() + Math.random() + action;
return {
promise: new Promise ((res) => PROMISES_RESOLVES[ callbackId ] = res),
callbackId
};
}
/***/ }),
/***/ "./src/page-script.js":
/*!****************************!*\
!*** ./src/page-script.js ***!
\****************************/
/*! no exports provided */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony import */ var couli__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! couli */ "../couli/dist/couli.js");
/* harmony import */ var couli__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(couli__WEBPACK_IMPORTED_MODULE_0__);
/* harmony import */ var _page_store__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./page/store */ "./src/page/store.js");
/* harmony import */ var _page_ui_components_search_string_index__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./page/ui/components/search-string/index */ "./src/page/ui/components/search-string/index.js");
/* harmony import */ var _page_ui_components_controls_index__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./page/ui/components/controls/index */ "./src/page/ui/components/controls/index.js");
/* harmony import */ var _page_ui_components_popup_index__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ./page/ui/components/popup/index */ "./src/page/ui/components/popup/index.js");
/* harmony import */ var _common_constants__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ./common/constants */ "./src/common/constants.js");
/* harmony import */ var _common_interaction__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ./common/interaction */ "./src/common/interaction.js");
/* harmony import */ var _page_global_styles__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(/*! ./page/global-styles */ "./src/page/global-styles.js");
/* harmony import */ var _page_actions__WEBPACK_IMPORTED_MODULE_8__ = __webpack_require__(/*! ./page/actions */ "./src/page/actions.js");
Object(_page_global_styles__WEBPACK_IMPORTED_MODULE_7__["appendStyles"])(_page_global_styles__WEBPACK_IMPORTED_MODULE_7__["stylesString"]);
initializePopup();
window.onunload = () => { Object(_common_interaction__WEBPACK_IMPORTED_MODULE_6__["sendMessage"])('closingTab', { tabId: _page_store__WEBPACK_IMPORTED_MODULE_1__["default"].tabId() }); };
browser.runtime.onMessage.addListener((message) => Object(_common_interaction__WEBPACK_IMPORTED_MODULE_6__["onMessage"])(message, _page_actions__WEBPACK_IMPORTED_MODULE_8__["default"]));
function initializePopup () {
const popupContainer = document.createElement('div');
popupContainer.style.zIndex = 2147483647;
popupContainer.id = _common_constants__WEBPACK_IMPORTED_MODULE_5__["EXTENSION_ID"];
const popup = document.createElement('popup');
popupContainer.appendChild(popup);
document.body.appendChild(popupContainer);
couli__WEBPACK_IMPORTED_MODULE_0___default.a.apply('#' + _common_constants__WEBPACK_IMPORTED_MODULE_5__["EXTENSION_ID"]);
return window.EXTENSION_POPUP_INTERFACE;
}
/***/ }),
/***/ "./src/page/actions.js":
/*!*****************************!*\
!*** ./src/page/actions.js ***!
\*****************************/
/*! exports provided: default */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony import */ var _common_constants__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../common/constants */ "./src/common/constants.js");
/* harmony import */ var _store__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./store */ "./src/page/store.js");
/* harmony import */ var _common_interaction__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../common/interaction */ "./src/common/interaction.js");
/* harmony default export */ __webpack_exports__["default"] = ({
saveTabId,
notifyOfClosing,
popupState,
openSearchGroup,
setContextMenu,
removeSearch,
removeAllContextMenus
});
function saveTabId (message) {
_store__WEBPACK_IMPORTED_MODULE_1__["default"].tabId(message.tabId);
}
function popupState (state) {
_store__WEBPACK_IMPORTED_MODULE_1__["default"].setPopupState(state.open);
}
function notifyOfClosing () {
return Object(_common_interaction__WEBPACK_IMPORTED_MODULE_2__["sendMessage"])('closingPopup', { tabId: _store__WEBPACK_IMPORTED_MODULE_1__["default"].tabId() });
}
function openSearchGroup ({ idx }) {
_store__WEBPACK_IMPORTED_MODULE_1__["default"].inputFocusNeeded(true);
_store__WEBPACK_IMPORTED_MODULE_1__["default"].setCurrentSearch(idx);
}
function removeSearch ({ idx }) {
if (idx === -1) {
_store__WEBPACK_IMPORTED_MODULE_1__["default"].removeSearch({ all: true });
return;
}
_store__WEBPACK_IMPORTED_MODULE_1__["default"].removeSearch({ idx })
}
function setContextMenu (idx, string) {
Object(_common_interaction__WEBPACK_IMPORTED_MODULE_2__["sendMessage"])('setContextMenu', { tabId: _store__WEBPACK_IMPORTED_MODULE_1__["default"].tabId(), idx, string });
}
function removeAllContextMenus () {
Object(_common_interaction__WEBPACK_IMPORTED_MODULE_2__["sendMessage"])('removeAllContextMenus', { tabId: _store__WEBPACK_IMPORTED_MODULE_1__["default"].tabId() });
}
/***/ }),
/***/ "./src/page/find.js":
/*!**************************!*\
!*** ./src/page/find.js ***!
\**************************/
/*! exports provided: default */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony import */ var _common_interaction__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../common/interaction */ "./src/common/interaction.js");
/* harmony import */ var _fixes__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./fixes */ "./src/page/fixes.js");
/* harmony default export */ __webpack_exports__["default"] = (find);
function find (searchStrings, caseSensitive) {
Object(_fixes__WEBPACK_IMPORTED_MODULE_1__["FIND_API_INPUT_BUG_FIX_BEFORE"])();
return Object(_common_interaction__WEBPACK_IMPORTED_MODULE_0__["sendMessage"])('find', {
string: searchStrings[0].string,
caseSensitive,
}).then(({ foundResults }) => {
Object(_fixes__WEBPACK_IMPORTED_MODULE_1__["FIND_API_INPUT_BUG_FIX_AFTER"])();
const allTextNodes = getAllTextNodes();
return filterRanges(foundResults, searchStrings.slice(1), caseSensitive, allTextNodes);
});
}
function getAllTextNodes () {
const allTextNodes = [];
const walker = document.createTreeWalker(document, window.NodeFilter.SHOW_TEXT, null, false);
let node;
while(node = walker.nextNode()) {
allTextNodes.push(node);
}
return allTextNodes;
}
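// filterRanges starts from the ranges the browser find API reported for
// the first search string and tries to extend each one through every
// refinement: it walks forward through the text nodes for at most
// `distance` characters plus the refinement's length, and keeps a result
// only if all refinements match in order. Survivors are rebuilt as DOM
// Range objects; ranges whose nodes have since disappeared are silently
// dropped by the try/catch.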
function filterRanges (firstFoundPart, searchRefinements, caseSensitive, allTextNodes) {
const finalRanges = [];
firstFoundPart.rangeData.forEach((rangeOpts) => {
let endNodePos = rangeOpts.endTextNodePos;
let endNode = allTextNodes[rangeOpts.endTextNodePos];
let endOffset = rangeOpts.endOffset;
for (let i = 0; i < searchRefinements.length; i++) {
const refinement = searchRefinements[i];
let nextNode = endNode;
const searchDistance = +refinement.distance + refinement.string.length;
let remainingSearchDistance = searchDistance + endOffset;
let nextOffset = remainingSearchDistance;
let success = false;
let prevNodesLength = 0;
do {
let nodeValueLength = nextNode.nodeValue.length;
if (nextOffset > nodeValueLength) {
nextOffset = nodeValueLength;
}
const matchPos = isStringInRange(
endNode, endOffset,
nextNode, nextOffset,
refinement.string,
caseSensitive,
);
if (matchPos >= 0) {
success = true;
endNode = nextNode;
endOffset = endOffset + matchPos - prevNodesLength + refinement.string.length;
break;
}
remainingSearchDistance = remainingSearchDistance - nodeValueLength;
nextOffset = remainingSearchDistance;
prevNodesLength += nodeValueLength;
nextNode = allTextNodes[++endNodePos];
} while ( remainingSearchDistance > 0 && !success && nextNode )
if (!success) {
return;
}
}
try {
const startNode = allTextNodes[rangeOpts.startTextNodePos];
const range = new Range();
range.setStart(startNode, rangeOpts.startOffset);
range.setEnd(endNode, endOffset);
finalRanges.push(range);
} catch (e) {}
});
return finalRanges;
}
function isStringInRange(startNode, startOffset, endNode, endOffset, string, caseSensitive) {
const range = new Range();
range.setStart(startNode, startOffset);
range.setEnd(endNode, endOffset);
let rangeText = range.toString();
if (!caseSensitive) {
string = string.toLowerCase();
rangeText = rangeText.toLowerCase();
}
const matchPos = rangeText.indexOf(string);
return matchPos;
}
/***/ }),
/***/ "./src/page/fixes.js":
/*!***************************!*\
!*** ./src/page/fixes.js ***!
\***************************/
/*! exports provided: FIND_API_INPUT_BUG_FIX_BEFORE, FIND_API_INPUT_BUG_FIX_AFTER */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "FIND_API_INPUT_BUG_FIX_BEFORE", function() { return FIND_API_INPUT_BUG_FIX_BEFORE; });
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "FIND_API_INPUT_BUG_FIX_AFTER", function() { return FIND_API_INPUT_BUG_FIX_AFTER; });
/**********************************************************************
TEMPORARY FIX FOR FINDING STRINGS THAT ARE ENTERED IN INPUTS
***********************************************************************/
function FIND_API_INPUT_BUG_FIX_BEFORE () {
window.inputValues = [];
document.querySelectorAll('input').forEach((input) => {
window.inputValues.push(input.value);
input.value = '';
});
}
function FIND_API_INPUT_BUG_FIX_AFTER () {
document.querySelectorAll('input').forEach((input, i) => {
input.value = window.inputValues[i];
});
}
/***/ }),
/***/ "./src/page/global-styles.js":
/*!***********************************!*\
!*** ./src/page/global-styles.js ***!
\***********************************/
/*! exports provided: stylesString, appendStyles */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "stylesString", function() { return stylesString; });
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "appendStyles", function() { return appendStyles; });
/* harmony import */ var _common_constants__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./../common/constants */ "./src/common/constants.js");
/* harmony import */ var _store__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./store */ "./src/page/store.js");
let stylesString = _store__WEBPACK_IMPORTED_MODULE_1__["default"].COLORS.map((color, idx) => `
@keyframes ${_common_constants__WEBPACK_IMPORTED_MODULE_0__["FIND_HELPER_CLASS"]}${idx} {
0% {
outline-color: rgba(${color}, 1);
}
100% {
outline-color: rgba(${color}, 0);
}
}
.${_common_constants__WEBPACK_IMPORTED_MODULE_0__["FIND_HELPER_CLASS"]}${idx} {
animation-name: '${_common_constants__WEBPACK_IMPORTED_MODULE_0__["FIND_HELPER_CLASS"]}${idx}';
outline-style: solid;
animation-duration: 1.3s;
animation-timing-function: linear;
animation-iteration-count: infinite;
}
`).join('');
stylesString += `
@keyframes ${_common_constants__WEBPACK_IMPORTED_MODULE_0__["EXTENSION_ID"]}_outline_blinker {
50% {
outline-color: rgba(0, 0, 0, 0);
}
}
`;
function appendStyles (styleStr) {
const styleEl = document.createElement('style');
styleEl.appendChild( document.createTextNode(styleStr) );
document.head.appendChild(styleEl);
}
/***/ }),
/***/ "./src/page/highlightings.js":
/*!***********************************!*\
!*** ./src/page/highlightings.js ***!
\***********************************/
/*! exports provided: default */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony import */ var _store__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./store */ "./src/page/store.js");
/* harmony import */ var _common_constants__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../common/constants */ "./src/common/constants.js");
/* harmony import */ var _ui_index__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./ui/index */ "./src/page/ui/index.js");
/* harmony import */ var _ui_components_highlighting_styles__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./ui/components/highlighting/styles */ "./src/page/ui/components/highlighting/styles.js");
/* harmony import */ var _ui_components_scrollbarMark_styles__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ./ui/components/scrollbarMark/styles */ "./src/page/ui/components/scrollbarMark/styles.js");
const HIGHLIGHTINGS_POSITIONS = [];
/* harmony default export */ __webpack_exports__["default"] = ({
create: createHighlightings,
remove: removeHighlightings,
moveTo: jumpTo,
switchBlinking,
});
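// Each highlighting is a set of absolutely positioned <span>s (one per
// client rect of the found Range) plus a scrollbar mark; their page
// coordinates are cached in HIGHLIGHTINGS_POSITIONS[searchId] so jumpTo
// can later center the viewport on any result.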
function createHighlightings (ranges, data) {
removeHighlightings(data.searchId);
const doc = document.documentElement;
const scrollLeft = (window.pageXOffset || doc.scrollLeft) - (doc.clientLeft || 0);
const scrollTop = (window.pageYOffset || doc.scrollTop) - (doc.clientTop || 0);
ranges.forEach((range, rangeIdx) => {
const boundingRects = range.getClientRects();
const topPosition = scrollTop + boundingRects[0].top;
const highlightData = {
els: [],
top: topPosition,
left: scrollLeft + boundingRects[0].left
};
for (let i = 0; i < boundingRects.length; i++) {
const rect = boundingRects[i];
const highlightElement = createHightlightElement(rect, scrollTop, scrollLeft, data);
document.body.appendChild(highlightElement);
highlightData.els.push(highlightElement);
}
const scrollBarMark = createScrollbarMark(topPosition, data, rangeIdx);
document.body.appendChild(scrollBarMark);
HIGHLIGHTINGS_POSITIONS[data.searchId].push(highlightData);
});
}
function createHightlightElement (rect, scrollTop, scrollLeft, data) {
const highlight = Object(_ui_index__WEBPACK_IMPORTED_MODULE_2__["createElement"])(
'span',
Object(_ui_components_highlighting_styles__WEBPACK_IMPORTED_MODULE_3__["default"])({ scrollTop, scrollLeft, rect, ...data })
);
highlight.classList.add(_common_constants__WEBPACK_IMPORTED_MODULE_1__["EXTENSION_ID"] + data.searchId);
return highlight;
}
function createScrollbarMark (topPosition, data, rangeIdx) {
const scrollBarMark = Object(_ui_index__WEBPACK_IMPORTED_MODULE_2__["createElement"])(
'span',
Object(_ui_components_scrollbarMark_styles__WEBPACK_IMPORTED_MODULE_4__["default"])({ topPosition, ...data })
);
scrollBarMark.title = data.searchString;
scrollBarMark.onclick = () => {
jumpTo(data.searchId, rangeIdx);
_store__WEBPACK_IMPORTED_MODULE_0__["default"].moveThroughSearch({ searchId: data.searchId, highlightPosition: rangeIdx + 1 });
};
scrollBarMark.classList.add(_common_constants__WEBPACK_IMPORTED_MODULE_1__["EXTENSION_ID"] + data.searchId);
return scrollBarMark;
}
function jumpTo (searchId, id) {
const highlightPosition = HIGHLIGHTINGS_POSITIONS[searchId][id];
const centerHeight = window.innerHeight / 2;
const centerWidth = window.innerWidth / 2;
window.scrollTo(
highlightPosition.left - centerWidth,
highlightPosition.top - centerHeight
);
}
function removeHighlightings (searchId) {
document.querySelectorAll('.' + _common_constants__WEBPACK_IMPORTED_MODULE_1__["EXTENSION_ID"] + searchId)
.forEach((el) => el.parentNode.removeChild(el));
HIGHLIGHTINGS_POSITIONS[searchId] = [];
}
function switchBlinking (searchId, operation) {
HIGHLIGHTINGS_POSITIONS[searchId]
.forEach((highlightData) => highlightData.els
.forEach((el) => el.classList[operation](`${_common_constants__WEBPACK_IMPORTED_MODULE_1__["FIND_HELPER_CLASS"]}${searchId}`)));
}
/***/ }),
/***/ "./src/page/store.js":
/*!***************************!*\
!*** ./src/page/store.js ***!
\***************************/
/*! exports provided: default */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony import */ var _actions__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./actions */ "./src/page/actions.js");
/* harmony import */ var _highlightings__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./highlightings */ "./src/page/highlightings.js");
/* harmony import */ var _find__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./find */ "./src/page/find.js");
/* harmony import */ var _common_constants__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../common/constants */ "./src/common/constants.js");
const LISTENERS = [];
const store = {
COLORS: _common_constants__WEBPACK_IMPORTED_MODULE_3__["COLORS"],
getCurrentSearch,
addListener,
tabId,
startSearch,
moveThroughSearch,
switchCaseSensitivity,
switchBlink,
removeSearch,
getPopupData,
setPopupState,
closePopup,
setCurrentSearch,
setupSearch,
getCurrentString,
handleInputActivity,
removeSearchString,
addNewSearchString,
updateStringDistance,
inputFocusNeeded,
HTMLElement,
};
const STATE = {
popupOpen: true,
tabId: null,
searches: _common_constants__WEBPACK_IMPORTED_MODULE_3__["COLORS"].map((c, i) => initiateSearchOpts(i, '')),
searchId: 1,
inputFocusNeeded: false,
HTMLElements: {},
};
function initiateSearchOpts (i, string) {
return {
searchStrings: initiateSearchStrings(string),
foundResults: 0,
lastFocused: 0,
searchHappened: false,
highlightPosition: 0,
color: _common_constants__WEBPACK_IMPORTED_MODULE_3__["COLORS"][i],
searchId: i,
caseSensitive: false,
}
}
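// initiateSearchStrings splits a space-separated query into per-word
// entries; only the first word gets focus and an unlimited distance:
//   initiateSearchStrings('foo bar');
//   // -> [{ string: 'foo', first: true,  focus: true,  distance: null },
//   //     { string: 'bar', first: false, focus: false, distance: 1 }]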
function initiateSearchStrings (strings) {
const splitStrings = strings.split(' ');
return splitStrings.map((string, i) => {
const distance = !i ? null : 1;
return { string, first: !i, focus: !i, distance };
});
}
function getCurrentSearch () {
return STATE.searches[ STATE.searchId ];
}
function setCurrentSearch (i) {
STATE.searchId = +i;
return runListeners();
}
function setupSearch (i, string) {
return STATE.searches[ i ] = initiateSearchOpts(+i, string);
}
function startSearch () {
const search = getCurrentSearch();
const { searchId, color, caseSensitive, searchStrings } = search;
const searchString = searchStrings.map((strObj) => strObj.string).join(' ');
_actions__WEBPACK_IMPORTED_MODULE_0__["default"].setContextMenu(searchId, searchString);
return Object(_find__WEBPACK_IMPORTED_MODULE_2__["default"])(searchStrings, caseSensitive).then((foundResults) => {
_highlightings__WEBPACK_IMPORTED_MODULE_1__["default"].create(foundResults, { searchId, color, searchString });
const changeObj = { foundResults: foundResults.length, searchHappened: true, blinkSet: false, searchStrings };
if (foundResults.length) {
changeObj.highlightPosition = 1;
_highlightings__WEBPACK_IMPORTED_MODULE_1__["default"].moveTo(searchId, 0);
}
Object.assign(search, changeObj);
runListeners();
});
}
function moveThroughSearch (opts, withMove) {
const searchId = adjustNumberToBoundaries(opts.searchId, 0, STATE.searches.length - 1, STATE.searchId);
STATE.searchId = searchId;
const search = getCurrentSearch();
const highlightPosition = adjustNumberToBoundaries(opts.highlightPosition, 1, search.foundResults, search.highlightPosition);
search.highlightPosition = highlightPosition;
if (withMove && highlightPosition) {
_highlightings__WEBPACK_IMPORTED_MODULE_1__["default"].moveTo(searchId, highlightPosition - 1);
}
return runListeners();
}
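// adjustNumberToBoundaries wraps navigation around the ends and falls
// back to the current value when no target is given: min - 1 wraps to
// max, max + 1 wraps to min, and undefined returns `def`. With min 1 and
// max 5: 0 -> 5, 6 -> 1, undefined -> def.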
function adjustNumberToBoundaries (num, min, max, def) {
switch (num) {
case min - 1:
return max;
case undefined:
return def;
case max + 1:
return min;
}
return num;
}
function removeSearch ({ all, idx }) {
if (all) {
removeAllSearches();
_actions__WEBPACK_IMPORTED_MODULE_0__["default"].removeAllContextMenus();
} else {
const searchId = typeof idx === 'undefined' ? STATE.searchId : idx;
removeSearchByIdx(searchId);
_actions__WEBPACK_IMPORTED_MODULE_0__["default"].setContextMenu(searchId, '');
}
return runListeners();
}
function removeSearchByIdx (idx) {
STATE.searches[idx] = initiateSearchOpts(idx, '');
_highlightings__WEBPACK_IMPORTED_MODULE_1__["default"].remove(idx);
}
function removeAllSearches () {
_common_constants__WEBPACK_IMPORTED_MODULE_3__["COLORS"].forEach((c, idx) => removeSearchByIdx(idx));
}
function switchBlink () {
const search = getCurrentSearch();
if (!search.foundResults) {
return;
}
const operation = search.blinkSet ? 'remove' : 'add';
_highlightings__WEBPACK_IMPORTED_MODULE_1__["default"].switchBlinking(search.searchId, operation);
search.blinkSet = !search.blinkSet;
return runListeners();
}
function getCurrentString () {
return getCurrentSearch().searchStrings.map(({ string }) => string).join(' ');
}
function switchCaseSensitivity () {
const caseSensitive = STATE.searches[ STATE.searchId ].caseSensitive;
STATE.searches[ STATE.searchId ].caseSensitive = !caseSensitive;
return runListeners();
}
function addListener (fn) {
LISTENERS.push(fn);
}
function runListeners () {
LISTENERS.forEach((fn) => fn(store));
return Promise.resolve();
}
function tabId (id) {
if (id) {
return STATE.tabId = id
}
return STATE.tabId;
}
function getPopupData () {
return {
open: STATE.popupOpen,
searches: STATE.searches.filter((el) => el.searchHappened).map((el) => ({
color: el.color,
string: el.searchStrings.map((string) => string.string).join(' '),
idx: el.searchId,
}))
}
}
function closePopup () {
_actions__WEBPACK_IMPORTED_MODULE_0__["default"].notifyOfClosing().then(() => setPopupState(false));
}
function setPopupState (open) {
STATE.popupOpen = open;
return runListeners();
}
function handleInputActivity (e, idx) {
const search = getCurrentSearch();
const { searchStrings, lastFocused } = search;
const currentString = searchStrings[ lastFocused ];
if (e.keyCode) {
if (e.keyCode === _common_constants__WEBPACK_IMPORTED_MODULE_3__["KEYBOARD_KEYS"].ENTER) {
if (e.shiftKey) {
currentString.focus = false;
searchStrings.splice(lastFocused + 1, 0, { string: '', focus: true, distance: 1 });
search.lastFocused = lastFocused + 1;
return runListeners();
}
if (e.ctrlKey) {
const newSearchStrings = splitSearchString(currentString);
if (lastFocused === 0) {
newSearchStrings[0].first = true;
newSearchStrings[0].distance = null;
}
newSearchStrings.slice(-1)[0].focus = true;
searchStrings.splice.apply(searchStrings, [lastFocused, 1].concat(newSearchStrings));
search.lastFocused = lastFocused + newSearchStrings.length - 1;
return runListeners();
}
if (e.altKey) {
HTMLElement('searchId').focus();
return;
}
return startSearch();
}
if (e.keyCode === _common_constants__WEBPACK_IMPORTED_MODULE_3__["KEYBOARD_KEYS"].BACKSPACE && !currentString.string.length && searchStrings.length > 1) {
searchStrings[ lastFocused - 1 ].focus = true;
search.lastFocused = lastFocused - 1;
searchStrings.splice(lastFocused, 1);
runListeners();
return;
}
}
if (e.type === 'click') {
currentString.focus = false;
searchStrings[ idx ].focus = true;
search.lastFocused = idx;
runListeners();
return;
}
if ([_common_constants__WEBPACK_IMPORTED_MODULE_3__["KEYBOARD_KEYS"].CTRL, _common_constants__WEBPACK_IMPORTED_MODULE_3__["KEYBOARD_KEYS"].SHIFT, _common_constants__WEBPACK_IMPORTED_MODULE_3__["KEYBOARD_KEYS"].ALT].includes(e.keyCode)) {
return;
}
currentString.string = e.target.value;
runListeners();
}
function removeSearchString () {
const search = getCurrentSearch();
const { searchStrings, lastFocused } = search;
searchStrings[ lastFocused - 1 ].focus = true;
search.lastFocused = lastFocused - 1;
searchStrings.splice(lastFocused, 1);
runListeners();
}
function addNewSearchString () {
const search = getCurrentSearch();
const { searchStrings, lastFocused } = search;
searchStrings[ lastFocused ].focus = false;
searchStrings.splice(lastFocused + 1, 0, { string: '', focus: true, distance: 1 });
search.lastFocused = lastFocused + 1;
runListeners();
}
function updateStringDistance (distance, idx) {
const search = getCurrentSearch();
search.searchStrings[ idx ].distance = toNumberOrZero(distance);
runListeners();
}
function toNumberOrZero (str) {
return +str || 0;
}
function inputFocusNeeded (bool) {
if (bool) {
STATE.inputFocusNeeded = bool;
return;
}
if (STATE.inputFocusNeeded) {
STATE.inputFocusNeeded = false;
return true;
}
}
function splitSearchString (searchString) {
const words = searchString.string.split(' ');
if (words.length === 1) {
return [searchString];
}
return words.map((string) => ({
string,
distance: 1,
focus: false,
}));
}
function HTMLElement (name, value) {
if (value) {
return STATE.HTMLElements[name] = value;
}
return STATE.HTMLElements[name];
}
/* harmony default export */ __webpack_exports__["default"] = (store);
/***/ }),
/***/ "./src/page/ui/components/controls/index.js":
/*!**************************************************!*\
!*** ./src/page/ui/components/controls/index.js ***!
\**************************************************/
/*! no exports provided */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony import */ var couli__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! couli */ "../couli/dist/couli.js");
/* harmony import */ var couli__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(couli__WEBPACK_IMPORTED_MODULE_0__);
/* harmony import */ var _store__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../../../store */ "./src/page/store.js");
/* harmony import */ var _common_constants__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../../../../common/constants */ "./src/common/constants.js");
/* harmony import */ var _markup_html__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./markup.html */ "./src/page/ui/components/controls/markup.html");
/* harmony import */ var _markup_html__WEBPACK_IMPORTED_MODULE_3___default = /*#__PURE__*/__webpack_require__.n(_markup_html__WEBPACK_IMPORTED_MODULE_3__);
/* harmony import */ var _styles__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ./styles */ "./src/page/ui/components/controls/styles.js");
const APP_WIDTH = 390;
const MAX_INPUT_WIDTH = 245;
couli__WEBPACK_IMPORTED_MODULE_0___default.a.define('controls', _markup_html__WEBPACK_IMPORTED_MODULE_3___default.a, {
lastFocused: {},
searchHappened: {},
caseSensitive: {},
color: {},
blinkSet: {},
foundResults: {
html: ($) => {
if (!$.searchHappened) {
return;
}
let resultString = $.foundResults + ' results';
if ($.foundResults) {
resultString = ' of ' + resultString;
}
return resultString;
}
},
searchButton: [{ click: (e, el, ci) => _store__WEBPACK_IMPORTED_MODULE_1__["default"].startSearch().then(() => ci.markup('searchId').focus()) }],
searchId: {
events: {
click: () => _store__WEBPACK_IMPORTED_MODULE_1__["default"].switchBlink(),
contextmenu: (e, el, ci) => {
e.preventDefault();
_store__WEBPACK_IMPORTED_MODULE_1__["default"].removeSearch({ all: e.shiftKey }).then(() => ci.markup('searchId').focus());
},
keyup: (e, el, ci) => {
const { SHIFT, CTRL, ALT, BACKSPACE } = _common_constants__WEBPACK_IMPORTED_MODULE_2__["KEYBOARD_KEYS"];
if (Object.values(_common_constants__WEBPACK_IMPORTED_MODULE_2__["KEYBOARD_KEYS"]).indexOf(e.keyCode) < 0 || [SHIFT, CTRL, ALT, BACKSPACE].includes(e.keyCode)) {
return;
}
switch (e.keyCode) {
case _common_constants__WEBPACK_IMPORTED_MODULE_2__["KEYBOARD_KEYS"].UP:
case _common_constants__WEBPACK_IMPORTED_MODULE_2__["KEYBOARD_KEYS"].s:
_store__WEBPACK_IMPORTED_MODULE_1__["default"].moveThroughSearch({ highlightPosition: ci.get('highlightPosition') - 1 }, true);
return;
case _common_constants__WEBPACK_IMPORTED_MODULE_2__["KEYBOARD_KEYS"].DOWN:
case _common_constants__WEBPACK_IMPORTED_MODULE_2__["KEYBOARD_KEYS"].d:
_store__WEBPACK_IMPORTED_MODULE_1__["default"].moveThroughSearch({ highlightPosition: ci.get('highlightPosition') + 1 }, true);
return;
case _common_constants__WEBPACK_IMPORTED_MODULE_2__["KEYBOARD_KEYS"].ENTER:
case _common_constants__WEBPACK_IMPORTED_MODULE_2__["KEYBOARD_KEYS"].e:
_store__WEBPACK_IMPORTED_MODULE_1__["default"].startSearch().then(() => ci.markup('searchId').focus());
return;
case _common_constants__WEBPACK_IMPORTED_MODULE_2__["KEYBOARD_KEYS"].DELETE:
case _common_constants__WEBPACK_IMPORTED_MODULE_2__["KEYBOARD_KEYS"].r:
_store__WEBPACK_IMPORTED_MODULE_1__["default"].removeSearch({ all: e.shiftKey }).then(() => ci.markup('searchId').focus());
return;
case _common_constants__WEBPACK_IMPORTED_MODULE_2__["KEYBOARD_KEYS"].w:
ci.down('searchStrings').get(0).markup('string').focus();
return;
case _common_constants__WEBPACK_IMPORTED_MODULE_2__["KEYBOARD_KEYS"].LEFT:
case _common_constants__WEBPACK_IMPORTED_MODULE_2__["KEYBOARD_KEYS"].a:
_store__WEBPACK_IMPORTED_MODULE_1__["default"].moveThroughSearch({ searchId: ci.get('searchId') - 1 });
return;
case _common_constants__WEBPACK_IMPORTED_MODULE_2__["KEYBOARD_KEYS"].RIGHT:
case _common_constants__WEBPACK_IMPORTED_MODULE_2__["KEYBOARD_KEYS"].f:
_store__WEBPACK_IMPORTED_MODULE_1__["default"].moveThroughSearch({ searchId: ci.get('searchId') + 1 });
return;
case _common_constants__WEBPACK_IMPORTED_MODULE_2__["KEYBOARD_KEYS"].c:
_store__WEBPACK_IMPORTED_MODULE_1__["default"].switchCaseSensitivity();
return;
case _common_constants__WEBPACK_IMPORTED_MODULE_2__["KEYBOARD_KEYS"].b:
_store__WEBPACK_IMPORTED_MODULE_1__["default"].switchBlink()
return;
case _common_constants__WEBPACK_IMPORTED_MODULE_2__["KEYBOARD_KEYS"].ESC:
_store__WEBPACK_IMPORTED_MODULE_1__["default"].closePopup();
return;
default:
_store__WEBPACK_IMPORTED_MODULE_1__["default"].moveThroughSearch({ searchId: e.keyCode - 48 });
}
}
},
style: ($) => ({
backgroundColor: 'rgba(' + $.color + ', 0.35)',
})
},
moveLeft: [{ click: (e, el, ci) => _store__WEBPACK_IMPORTED_MODULE_1__["default"].moveThroughSearch({ searchId: ci.get('searchId') - 1 }) }],
moveRight: [{ click: (e, el, ci) => _store__WEBPACK_IMPORTED_MODULE_1__["default"].moveThroughSearch({ searchId: ci.get('searchId') + 1 }) }],
removeSearch: {
events: {
click: (e, el, ci) => _store__WEBPACK_IMPORTED_MODULE_1__["default"].removeSearch({ all: e.shiftKey }).then(() => ci.markup('searchId').focus())
},
class: ($) => ({
hidden: $.searchStrings.length === 1 && !$.searchStrings[0].string.length && !$.searchHappened
})
},
caseButton: {
events: {
click: () => _store__WEBPACK_IMPORTED_MODULE_1__["default"].switchCaseSensitivity(),
},
style: ($) => ({
backgroundColor: $.caseSensitive ? 'PaleGreen' : 'white',
})
},
results: { class: ($) => ({ hidden: !$.searchHappened }) },
controls: ($) => ({ hidden: !$.foundResults }),
moveUp: [{ click: (e, el, ci) => _store__WEBPACK_IMPORTED_MODULE_1__["default"].moveThroughSearch({ highlightPosition: ci.get('highlightPosition') - 1 }, true) }],
moveDown: [{ click: (e, el, ci) => _store__WEBPACK_IMPORTED_MODULE_1__["default"].moveThroughSearch({ highlightPosition: ci.get('highlightPosition') + 1 }, true) }],
blink: {
events: { click: () => _store__WEBPACK_IMPORTED_MODULE_1__["default"].switchBlink() },
style: ($) => ({
backgroundColor: $.blinkSet ? `rgba(${ $.color }, 0.35)` : 'white',
})
},
'': {
hooks: {
mount: (el, data, ci) => {
ci.set( _store__WEBPACK_IMPORTED_MODULE_1__["default"].getCurrentSearch() );
_store__WEBPACK_IMPORTED_MODULE_1__["default"].HTMLElement('searchId', ci.markup('searchId'));
_store__WEBPACK_IMPORTED_MODULE_1__["default"].addListener((store) => {
ci.set( store.getCurrentSearch() );
if (store.inputFocusNeeded()) {
ci.down('searchStrings').get(0).markup('string').focus();
}
});
}
}
}
}, Object(_styles__WEBPACK_IMPORTED_MODULE_4__["default"])({ APP_WIDTH, MAX_INPUT_WIDTH }));
/***/ }),
/***/ "./src/page/ui/components/controls/markup.html":
/*!*****************************************************!*\
!*** ./src/page/ui/components/controls/markup.html ***!
\*****************************************************/
/*! no static exports found */
/***/ (function(module, exports) {
module.exports = "<div>\r\n <ul x-lb=\"searchStrings\">\r\n <search-string>\r\n </ul>\r\n <div class=\"upperButtons\">\r\n <button x-b=\"searchButton\">Find</button>\r\n <button x-b=\"caseButton\" title=\"Case-sensitive on/off\">C</button>\r\n <div class=\"search-constrols\">\r\n <button x-b=\"moveLeft\" tabindex=\"-1\"></button>\r\n <button x-b=\"searchId\" tabindex=\"0\"></button>\r\n <button x-b=\"removeSearch\" tabindex=\"-1\" title=\"Remove current search\"></button>\r\n <button x-b=\"moveRight\" tabindex=\"-1\"></button>\r\n </div>\r\n </div>\r\n\r\n <div x-b=\"results\" >\r\n <span x-b=\"foundResults\"></span>\r\n <div x-b=\"controls\" class=\"hidden\">\r\n <button x-b=\"blink\" title=\"Get findings to blink\">Blink</button>\r\n <button x-b=\"moveUp\"></button>\r\n <button x-b=\"moveDown\"></button>\r\n <span x-b=\"highlightPosition\"></span>\r\n </div>\r\n </div>\r\n</div>\r\n";
/***/ }),
/***/ "./src/page/ui/components/controls/styles.js":
/*!***************************************************!*\
!*** ./src/page/ui/components/controls/styles.js ***!
\***************************************************/
/*! exports provided: default */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony import */ var _common_constants__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../../../../common/constants */ "./src/common/constants.js");
/* harmony default export */ __webpack_exports__["default"] = ((vars) => ({
'': {
fontSize: 15,
boxSizing: 'border-box',
fontFamily: 'sans-serif',
width: vars.APP_WIDTH,
marginTop: 18,
marginLeft: 10,
marginRight: 10,
color: 'black',
lineHeight: 'normal',
},
'button': {
background: 'none',
fontSize: 15,
'-webkit-appearance': 'none',
border: '2px solid black',
cursor: 'pointer',
color: 'black',
fontWeight: '400',
fontFamily: 'sans-serif',
padding: '0 8px',
lineHeight: 'normal',
wordWrap: 'normal',
borderRadius: 0,
height: 24,
minHeight: 'auto',
boxShadow: 'none',
},
'input': {
fontFamily: 'sans-serif',
lineHeight: 'normal',
padding: '1px 1px !important',
fontSize: 15,
height: 24,
},
'button:hover': {
color: 'DarkGreen',
borderColor: 'LimeGreen',
backgroundColor: 'transparent',
boxShadow: 'none',
},
'button:focus': {
outline: 'none',
},
'button:active': {
outline: '1px solid LimeGreen',
},
'.upperButtons': {
float: 'right',
marginTop: 5,
},
searchStrings: {
listStyle: 'none',
margin: 0,
padding: 0,
display: 'inline-block',
width: vars.MAX_INPUT_WIDTH,
},
'.searchString:first-child': {
marginLeft: 0,
},
searchButton: {
width: 50,
},
caseButton: {
width: 22,
marginLeft: 2,
paddingLeft: 0,
paddingRight: 1,
},
'.search-controls': {
display: 'inline-block',
position: 'relative',
},
searchId: {
width: 22,
paddingLeft: 1,
paddingRight: 1,
},
removeSearch: {
position: 'absolute',
padding: 0,
width: 14,
height: 14,
top: -16,
right: -5,
},
'removeSearch::before, removeSearch::after': {
content: "''",
display: 'block',
position: 'absolute',
background: 'black',
width: '100%',
height: 2,
top: 4,
cursor: 'pointer',
},
'removeSearch:hover': {
borderColor: 'OrangeRed',
},
'removeSearch:hover::before, removeSearch:hover::after': {
background: 'DarkRed'
},
'removeSearch::before': {
transform: 'rotate(45deg)',
},
'removeSearch::after': {
transform: 'rotate(-45deg)',
},
'searchId:focus': {
color: 'DarkGreen',
borderColor: 'LimeGreen',
outline: '1px solid LimeGreen',
animation: `'${_common_constants__WEBPACK_IMPORTED_MODULE_0__["EXTENSION_ID"]}_outline_blinker' 1.3s linear infinite`
},
'.hidden': {
display: 'none'
},
results: {
paddingTop: 3,
paddingBottom: 9,
overflow: 'auto',
},
foundResults: {
display: 'inline-block',
paddingTop: 9,
paddingRight: 12,
verticalAlign: 'middle',
},
highlightPosition: {
display: 'inline-block',
paddingTop: 9,
paddingRight: 3,
paddingLeft: 5,
verticalAlign: 'middle',
},
'foundResults, controls': {
float: 'right',
},
'blink': {
height: 23,
verticalAlign: 'bottom',
marginRight: 5,
},
'moveUp, moveDown': {
width: 32,
height: 23,
position: 'relative',
},
'moveUp::before, moveUp::after, moveDown::before, moveDown::after': {
content: "''",
display: 'block',
position: 'absolute',
background: 'black',
width: 13,
height: 3,
top: 8
},
'moveUp:hover::before, moveUp:hover::after, moveDown:hover::before, moveDown:hover::after': {
background: 'DarkGreen'
},
'moveUp::before, moveDown::before': {
left: 4
},
'moveUp::after, moveDown::after': {
right: 4
},
'moveDown::before' : {
transform: 'rotate(45deg)'
},
'moveDown::after' : {
transform: 'rotate(-45deg)'
},
'moveUp::before' : {
transform: 'rotate(-45deg)'
},
'moveUp::after' : {
transform: 'rotate(45deg)'
},
'moveLeft:hover': {
borderTopColor: 'transparent',
borderBottomColor: 'transparent',
},
'moveRight:hover': {
borderTopColor: 'transparent',
borderBottomColor: 'transparent',
},
moveLeft: {
padding: 0,
verticalAlign: 'top',
marginTop: 5,
height: 14,
border: 'none',
borderTop: '7px solid transparent',
borderRight: '7px solid black',
borderBottom: '7px solid transparent',
},
moveRight: {
padding: 0,
verticalAlign: 'top',
marginTop: 5,
height: 14,
border: 'none',
borderTop: '7px solid transparent',
borderLeft: '7px solid black',
borderBottom: '7px solid transparent',
},
}));
/***/ }),
/***/ "./src/page/ui/components/highlighting/styles.js":
/*!*******************************************************!*\
!*** ./src/page/ui/components/highlighting/styles.js ***!
\*******************************************************/
/*! exports provided: default */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony default export */ __webpack_exports__["default"] = ((vars) => ({
position: 'absolute',
height: vars.rect.height + 'px',
width: vars.rect.width + 'px',
top: vars.scrollTop + vars.rect.y + 'px',
left: vars.scrollLeft + vars.rect.x + 'px',
'z-index': 2147483645,
'pointer-events': 'none',
background: 'rgba(' + vars.color + ', 0.25)',
'outline-width': '3px',
}));
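// Worked example (a sketch): a match rect of { x: 100, y: 40, width: 80, height: 18 }
// with scrollTop 500 and scrollLeft 0 yields an overlay at top: 540px, left: 100px,
// i.e. document coordinates, so the highlight stays glued to its text when the page
// scrolls; pointer-events: none keeps the underlying page clickable.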
/***/ }),
/***/ "./src/page/ui/components/popup/index.js":
/*!***********************************************!*\
!*** ./src/page/ui/components/popup/index.js ***!
\***********************************************/
/*! no exports provided */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony import */ var couli__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! couli */ "../couli/dist/couli.js");
/* harmony import */ var couli__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(couli__WEBPACK_IMPORTED_MODULE_0__);
/* harmony import */ var _styles__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./styles */ "./src/page/ui/components/popup/styles.js");
/* harmony import */ var _store__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../../../store */ "./src/page/store.js");
const INITIAL_POPUP_POSITION = { top: 20, left: 'auto', right: 20 };
couli__WEBPACK_IMPORTED_MODULE_0___default.a.define('popup',
`<div>
<div x-b="topbar">
<ul x-lb="searches">
<li x-b="string">
</ul>
<div x-b="closeButton"></div>
</div>
<controls x-state-name="controls"></controls>
</div>`, {
open: {
hooks: {
update: (el, value, ci) => {
if (value) {
ci.down('controls').down('searchStrings').get(0).markup('string').focus();
}
}
}
},
searches: {
listItem: {
state: {
color: {},
idx: {},
string: {
attrs: ($) => ({ title: $.string }),
style: ($) => ({ backgroundColor: 'rgba(' + $.color + ', 0.5)', }),
html: () => '',
events: {
click: (e, el, ci) => _store__WEBPACK_IMPORTED_MODULE_2__["default"].moveThroughSearch({ searchId: ci.get('idx') }, true),
contextmenu: (e, el, ci) => {
e.preventDefault();
_store__WEBPACK_IMPORTED_MODULE_2__["default"].removeSearch({ all: e.shiftKey, idx: ci.get('idx') }).then(() =>
ci.down('controls').down('searchStrings').get(0).markup('string').focus()
);
},
}
}
}
}
},
topbar: {
events: {
mousedown: (e, el, ci) => {
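// Drag-to-move: capture the pointer offset inside the popup once on
// mousedown, then reposition the popup on every mousemove (throttled via
// requestAnimationFrame) until mouseup removes the listeners again.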
const popupEl = ci.markup();
const shiftX = e.clientX - popupEl.getBoundingClientRect().left;
const shiftY = e.clientY - popupEl.getBoundingClientRect().top;
function onMouseMove(e) {
requestAnimationFrame(() => {
popupEl.style.left = e.clientX - shiftX + 'px';
popupEl.style.top = e.clientY - shiftY + 'px';
});
}
function onMouseUp (e) {
document.removeEventListener('mousemove', onMouseMove);
document.removeEventListener('mouseup', onMouseUp);
el.removeEventListener('mouseup', onMouseUp);
}
document.addEventListener('mouseup', onMouseUp);
el.addEventListener('mouseup', onMouseUp);
document.addEventListener('mousemove', onMouseMove);
},
dragstart: () => false
}
},
closeButton: {
events: {
click: () => _store__WEBPACK_IMPORTED_MODULE_2__["default"].closePopup()
}
},
'': {
style: ($) => {
const styles = { display: $.open ? 'block' : 'none' };
if ($.open) {
Object.assign(styles, INITIAL_POPUP_POSITION);
}
return styles;
},
hooks: {
mount: (el, data, ci) => {
ci.set({ open: true });
_store__WEBPACK_IMPORTED_MODULE_2__["default"].addListener((store) => {
const popupData = store.getPopupData();
if (ci.get('open') && popupData.open) {
delete popupData.open;
}
ci.set(popupData);
});
}
}
}
}, Object(_styles__WEBPACK_IMPORTED_MODULE_1__["default"])({ INITIAL_POPUP_POSITION }));
/***/ }),
/***/ "./src/page/ui/components/popup/styles.js":
/*!************************************************!*\
!*** ./src/page/ui/components/popup/styles.js ***!
\************************************************/
/*! exports provided: default */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony default export */ __webpack_exports__["default"] = ((vars) => ({
'': {
width: 415,
position: 'fixed',
background: 'white',
border: '3px solid black',
zIndex: '2147483647',
boxSizing: 'border-box',
padding: 0,
...vars.INITIAL_POPUP_POSITION
},
topbar: {
height: 15,
borderBottom: '1px solid black',
cursor: 'grab',
position: 'relative',
background: '#d6f5d6',
boxSizing: 'border-box',
},
searches: {
margin: 0,
padding: 0,
listStyle: 'none',
},
'searches li': {
float: 'left',
width: 27,
height: 15,
borderRight: '1px solid black',
cursor: 'pointer',
},
closeButton: {
position: 'absolute',
top: 0,
right: 0,
bottom: 0,
width: 32,
cursor: 'pointer',
borderLeft: '1px solid black',
background: 'rgba(255, 79, 43, 0.85)',
},
'closeButton:hover': {
background: 'rgb(255, 79, 43)'
},
'closeButton::before, closeButton::after': {
content: "''",
display: 'block',
position: 'absolute',
background: 'black',
width: 11,
height: 2,
top: 6,
left: 11,
cursor: 'pointer',
},
'closeButton::before': {
transform: 'rotate(45deg)',
},
'closeButton::after': {
transform: 'rotate(-45deg)',
},
}));
/***/ }),
/***/ "./src/page/ui/components/scrollbarMark/styles.js":
/*!********************************************************!*\
!*** ./src/page/ui/components/scrollbarMark/styles.js ***!
\********************************************************/
/*! exports provided: default */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony default export */ __webpack_exports__["default"] = ((vars) => ({
position: 'fixed',
height: '5px',
width: '15px',
top: window.innerHeight / document.body.scrollHeight * vars.topPosition + 'px',
right: '0px',
'z-index': 2147483645,
cursor: 'pointer',
background: 'rgba(' + vars.color + ', 0.5)',
}));
/***/ }),
/***/ "./src/page/ui/components/search-string/index.js":
/*!*******************************************************!*\
!*** ./src/page/ui/components/search-string/index.js ***!
\*******************************************************/
/*! no exports provided */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony import */ var couli__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! couli */ "../couli/dist/couli.js");
/* harmony import */ var couli__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(couli__WEBPACK_IMPORTED_MODULE_0__);
/* harmony import */ var _store__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../../../store */ "./src/page/store.js");
/* harmony import */ var _markup_html__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./markup.html */ "./src/page/ui/components/search-string/markup.html");
/* harmony import */ var _markup_html__WEBPACK_IMPORTED_MODULE_2___default = /*#__PURE__*/__webpack_require__.n(_markup_html__WEBPACK_IMPORTED_MODULE_2__);
/* harmony import */ var _styles__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./styles */ "./src/page/ui/components/search-string/styles.js");
const ACTIVE_SEARCH_INPUT_LENGTH = 150;
const MAX_INPUT_WIDTH = 255;
const MIN_INPUT_WIDTH = 110;
const SPACE_BETWEEN_INPUTS = 34;
const LETTER_WIDTH = 11;
const UNFOCUSED_INPUT_PADDING = 14;
const ADD_SIGN_PADDING = 12;
couli__WEBPACK_IMPORTED_MODULE_0___default.a.define('search-string', _markup_html__WEBPACK_IMPORTED_MODULE_2___default.a, {
focus: {},
string: {
events: {
keyup: (e) => {
e.stopPropagation();
_store__WEBPACK_IMPORTED_MODULE_1__["default"].handleInputActivity(e);
},
click: (e, el, ci, i) => _store__WEBPACK_IMPORTED_MODULE_1__["default"].handleInputActivity(e, i),
},
style: ($, ci) => {
let width;
if (!$.focus) {
width = ci.get('string').length * LETTER_WIDTH + UNFOCUSED_INPUT_PADDING
} else {
const unfocusedInputs = ci.up().filter((input) => !input.focus);
width = unfocusedInputs.reduce((focusedInputLength, input) => {
return focusedInputLength - input.string.length * LETTER_WIDTH - UNFOCUSED_INPUT_PADDING - SPACE_BETWEEN_INPUTS;
}, MAX_INPUT_WIDTH);
if (width < MIN_INPUT_WIDTH) {
width = MAX_INPUT_WIDTH;
}
width = width - ADD_SIGN_PADDING;
}
return { width };
},
},
distance: [
(item) => ({
hidden: item.distance === null,
}),
[{
keyup: (e, el, ci, i) => _store__WEBPACK_IMPORTED_MODULE_1__["default"].updateStringDistance(e.target.value, i)
}]
],
remove: {
events: {
click: () => _store__WEBPACK_IMPORTED_MODULE_1__["default"].removeSearchString()
},
class: ($) => ({ hidden: $.first || !$.focus }),
},
addNew: {
events: {
click: () => _store__WEBPACK_IMPORTED_MODULE_1__["default"].addNewSearchString(),
},
class: ($) => ({ hidden: !$.focus }),
},
focus: {
hooks: {
update: (el, value, ci) => {
if (value) {
setTimeout(() => ci.markup('string').focus(), 0);
}
}
}
}
}, Object(_styles__WEBPACK_IMPORTED_MODULE_3__["default"])({ SPACE_BETWEEN_INPUTS, ACTIVE_SEARCH_INPUT_LENGTH }));
/***/ }),
/***/ "./src/page/ui/components/search-string/markup.html":
/*!**********************************************************!*\
!*** ./src/page/ui/components/search-string/markup.html ***!
\**********************************************************/
/*! no static exports found */
/***/ (function(module, exports) {
module.exports = "<li>\r\n <input x-b=\"string\" type=\"text\" class=\"string\">\r\n <input x-b=\"distance\" type=\"text\" class=\"distance\" title=\"Distance between substrings\">\r\n <button x-b=\"remove\" tabindex=\"-1\" title=\"Remove substring\"></button>\r\n <button x-b=\"addNew\" tabindex=\"-1\" title=\"Add new substring\"></button>\r\n</li>\r\n";
/***/ }),
/***/ "./src/page/ui/components/search-string/styles.js":
/*!********************************************************!*\
!*** ./src/page/ui/components/search-string/styles.js ***!
\********************************************************/
/*! exports provided: default */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony default export */ __webpack_exports__["default"] = ((vars) => ({
'': {
position: 'relative',
display: 'inline-block',
marginLeft: vars.SPACE_BETWEEN_INPUTS,
marginTop: 5,
marginBottom: 14,
},
string: {
boxSizing: 'border-box',
fontSize: 15,
width: vars.ACTIVE_SEARCH_INPUT_LENGTH,
'-webkit-appearance': 'none',
borderWidth: '0',
borderBottom: '2px solid black',
background: 'white',
color: 'black',
padding: 1,
},
'string:focus': {
outline: 'none',
borderColor: 'LimeGreen'
},
distance: {
width: vars.SPACE_BETWEEN_INPUTS - 10,
position: 'absolute',
left: -30,
top: -17,
'-webkit-appearance': 'none',
borderWidth: 0,
borderBottom: '1px solid black',
background: 'white',
color: 'black',
fontSize: 13,
height: 16,
},
'distance:focus': {
outline: 'none',
borderColor: 'LimeGreen'
},
'.hidden': {
display: 'none'
},
'addNew, remove': {
width: 10,
height: 10,
padding: 0,
position: 'absolute',
border: 'none',
},
addNew: {
right: -14,
bottom: 4,
},
remove: {
left: -14,
bottom: 4,
},
'addNew::before, addNew::after, remove::before, remove::after': {
content: "''",
display: 'block',
position: 'absolute',
background: 'black',
cursor: 'pointer',
width: '100%',
},
'addNew::before, addNew::after': {
background: 'DarkGreen',
height: 2,
top: 5,
},
'addNew:hover::before, addNew:hover::after': {
background: 'LimeGreen',
},
'remove::before, remove::after': {
background: 'DarkRed',
height: 2,
top: 5,
},
'remove:hover::before, remove:hover::after': {
background: 'OrangeRed',
},
'addNew::before': {
transform: 'rotate(90deg)',
},
'remove::before': {
transform: 'rotate(45deg)',
},
'remove::after': {
transform: 'rotate(-45deg)',
}
}));
/***/ }),
/***/ "./src/page/ui/index.js":
/*!******************************!*\
!*** ./src/page/ui/index.js ***!
\******************************/
/*! exports provided: createElement */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "createElement", function() { return createElement; });
function createElement (type, styles) {
const el = document.createElement(type);
for (let attr in styles) {
el.style.setProperty(attr, styles[attr], 'important');
}
return el;
}
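// Usage sketch (the caller below is hypothetical): every declared property is
// set with the 'important' priority, so host-page CSS cannot restyle elements
// the extension injects.
//
// const overlay = createElement('div', { position: 'fixed', top: '0px' });
// document.body.appendChild(overlay);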
/***/ })
/******/ }); | createBinding |
features.rs | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Tidy check to ensure that unstable features are all in order
//! | //! * Library features have at most one stability level
//! * Library features have at most one `since` value
//! * All unstable lang features have tests to ensure they are actually unstable
use std::collections::HashMap;
use std::fmt;
use std::fs::File;
use std::io::prelude::*;
use std::path::Path;
#[derive(Debug, PartialEq)]
pub enum Status {
Stable,
Removed,
Unstable,
}
impl fmt::Display for Status {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let as_str = match *self {
Status::Stable => "stable",
Status::Unstable => "unstable",
Status::Removed => "removed",
};
fmt::Display::fmt(as_str, f)
}
}
#[derive(Debug)]
pub struct Feature {
pub level: Status,
pub since: String,
pub has_gate_test: bool,
}
pub fn check(path: &Path, bad: &mut bool) {
let mut features = collect_lang_features(path);
assert!(!features.is_empty());
let lib_features = collect_lib_features(path, bad, &features);
assert!(!lib_features.is_empty());
let mut contents = String::new();
super::walk_many(&[&path.join("test/compile-fail"),
&path.join("test/compile-fail-fulldeps"),
&path.join("test/parse-fail"),],
&mut |path| super::filter_dirs(path),
&mut |file| {
let filename = file.file_name().unwrap().to_string_lossy();
if !filename.ends_with(".rs") || filename == "features.rs" ||
filename == "diagnostic_list.rs" {
return;
}
let filen_underscore = filename.replace("-","_").replace(".rs","");
let filename_is_gate_test = test_filen_gate(&filen_underscore, &mut features);
contents.truncate(0);
t!(t!(File::open(&file), &file).read_to_string(&mut contents));
for (i, line) in contents.lines().enumerate() {
let mut err = |msg: &str| {
tidy_error!(bad, "{}:{}: {}", file.display(), i + 1, msg);
};
let gate_test_str = "gate-test-";
if !line.contains(gate_test_str) {
continue;
}
let feature_name = match line.find(gate_test_str) {
Some(i) => {
// Take everything after "gate-test-" up to the next space; the
// original sliced with an offset relative to `line[i+1..]` used as
// an absolute index, which could cut the name short or panic.
let rest = &line[i + gate_test_str.len()..];
&rest[..rest.find(' ').unwrap_or(rest.len())]
},
None => continue,
};
match features.get_mut(feature_name) {
Some(f) => {
if filename_is_gate_test {
err(&format!("The file is already marked as gate test \
through its name, no need for a \
'gate-test-{}' comment",
feature_name));
}
f.has_gate_test = true;
}
None => {
err(&format!("gate-test comment references a nonexistent feature '{}'",
feature_name));
}
}
}
});
// Only check the number of lang features.
// Obligatory testing for library features is dumb.
let gate_untested = features.iter()
.filter(|&(_, f)| f.level == Status::Unstable)
.filter(|&(_, f)| !f.has_gate_test)
.collect::<Vec<_>>();
for &(name, _) in gate_untested.iter() {
println!("Expected a gate test for the feature '{}'.", name);
println!("Hint: create a file named 'feature-gate-{}.rs' in the compile-fail\
\n test suite, with its failures due to missing usage of\
\n #![feature({})].", name, name);
println!("Hint: If you already have such a test and don't want to rename it,\
\n you can also add a // gate-test-{} line to the test file.",
name);
}
if !gate_untested.is_empty() {
tidy_error!(bad, "Found {} features without a gate test.", gate_untested.len());
}
if *bad {
return;
}
let mut lines = Vec::new();
for (name, feature) in features.iter() {
lines.push(format!("{:<32} {:<8} {:<12} {:<8}",
name,
"lang",
feature.level,
feature.since));
}
for (name, feature) in lib_features {
lines.push(format!("{:<32} {:<8} {:<12} {:<8}",
name,
"lib",
feature.level,
feature.since));
}
lines.sort();
for line in lines {
println!("* {}", line);
}
}
fn find_attr_val<'a>(line: &'a str, attr: &str) -> Option<&'a str> {
line.find(attr)
.and_then(|i| line[i..].find('"').map(|j| i + j + 1))
.and_then(|i| line[i..].find('"').map(|j| (i, i + j)))
.map(|(i, j)| &line[i..j])
}
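// Example (a sketch): for the line `#[stable(feature = "rust1", since = "1.0.0")]`,
// `find_attr_val(line, "since")` returns `Some("1.0.0")` -- it locates the
// attribute name and then slices out the text between the next pair of quotes.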
fn test_filen_gate(filen_underscore: &str,
features: &mut HashMap<String, Feature>) -> bool {
if filen_underscore.starts_with("feature_gate") {
for (n, f) in features.iter_mut() {
if filen_underscore == format!("feature_gate_{}", n) {
f.has_gate_test = true;
return true;
}
}
}
return false;
}
pub fn collect_lang_features(base_src_path: &Path) -> HashMap<String, Feature> {
let mut contents = String::new();
let path = base_src_path.join("libsyntax/feature_gate.rs");
t!(t!(File::open(path)).read_to_string(&mut contents));
contents.lines()
.filter_map(|line| {
let mut parts = line.trim().split(",");
let level = match parts.next().map(|l| l.trim().trim_left_matches('(')) {
Some("active") => Status::Unstable,
Some("removed") => Status::Removed,
Some("accepted") => Status::Stable,
_ => return None,
};
let name = parts.next().unwrap().trim();
let since = parts.next().unwrap().trim().trim_matches('"');
Some((name.to_owned(),
Feature {
level: level,
since: since.to_owned(),
has_gate_test: false,
}))
})
.collect()
}
pub fn collect_lib_features(base_src_path: &Path,
bad: &mut bool,
features: &HashMap<String, Feature>) -> HashMap<String, Feature> {
let mut lib_features = HashMap::<String, Feature>::new();
let mut contents = String::new();
super::walk(base_src_path,
&mut |path| super::filter_dirs(path) || path.ends_with("src/test"),
&mut |file| {
let filename = file.file_name().unwrap().to_string_lossy();
if !filename.ends_with(".rs") || filename == "features.rs" ||
filename == "diagnostic_list.rs" {
return;
}
contents.truncate(0);
t!(t!(File::open(&file), &file).read_to_string(&mut contents));
for (i, line) in contents.lines().enumerate() {
let mut err = |msg: &str| {
tidy_error!(bad, "{}:{}: {}", file.display(), i + 1, msg);
};
let level = if line.contains("[unstable(") {
Status::Unstable
} else if line.contains("[stable(") {
Status::Stable
} else {
continue;
};
let feature_name = match find_attr_val(line, "feature") {
Some(name) => name,
None => {
err("malformed stability attribute");
continue;
}
};
let since = match find_attr_val(line, "since") {
Some(name) => name,
None if level == Status::Stable => {
err("malformed stability attribute");
continue;
}
None => "None",
};
if features.contains_key(feature_name) {
err("duplicating a lang feature");
}
if let Some(ref s) = lib_features.get(feature_name) {
if s.level != level {
err("different stability level than before");
}
if s.since != since {
err("different `since` than before");
}
continue;
}
lib_features.insert(feature_name.to_owned(),
Feature {
level: level,
since: since.to_owned(),
has_gate_test: false,
});
}
});
lib_features
} | //! This check will ensure properties like:
//!
//! * All stability attributes look reasonably well formed
//! * The set of library features is disjoint from the set of language features |
log.rs | // Copyright 2021 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::f64::consts::E;
use std::fmt;
use std::marker::PhantomData;
use std::sync::Arc;
use common_datavalues::prelude::*;
use common_datavalues::with_match_primitive_type_id;
use common_exception::Result;
use num_traits::AsPrimitive;
use crate::scalars::assert_numeric;
use crate::scalars::function_factory::FunctionFeatures;
use crate::scalars::EvalContext;
use crate::scalars::Function;
use crate::scalars::FunctionDescription;
use crate::scalars::ScalarBinaryExpression; |
/// Const f64 is not allowed as a generic parameter, and
/// feature(adt_const_params) is not yet stable or complete,
/// so the base is supplied through a trait instead.
pub trait Base: Send + Sync + Clone + 'static {
fn base() -> f64;
}
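// Adding another base is a one-liner under the same pattern (a sketch;
// `SevenBase` is hypothetical and not part of this crate):
//
// #[derive(Clone)]
// pub struct SevenBase;
// impl Base for SevenBase {
// fn base() -> f64 {
// 7f64
// }
// }
// pub type Log7Function = GenericLogFunction<SevenBase>;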
#[derive(Clone)]
pub struct EBase;
#[derive(Clone)]
pub struct TenBase;
#[derive(Clone)]
pub struct TwoBase;
impl Base for EBase {
fn base() -> f64 {
E
}
}
impl Base for TenBase {
fn base() -> f64 {
10f64
}
}
impl Base for TwoBase {
fn base() -> f64 {
2f64
}
}
#[derive(Clone)]
pub struct GenericLogFunction<T> {
display_name: String,
t: PhantomData<T>,
}
impl<T: Base> GenericLogFunction<T> {
pub fn try_create(display_name: &str) -> Result<Box<dyn Function>> {
Ok(Box::new(Self {
display_name: display_name.to_string(),
t: PhantomData,
}))
}
pub fn desc() -> FunctionDescription {
FunctionDescription::creator(Box::new(Self::try_create)).features(
FunctionFeatures::default()
.deterministic()
.variadic_arguments(1, 2),
)
}
fn log<S>(value: S, _ctx: &mut EvalContext) -> f64
where S: AsPrimitive<f64> {
value.as_().log(T::base())
}
fn log_with_base<S, B>(base: S, value: B, _ctx: &mut EvalContext) -> f64
where
S: AsPrimitive<f64>,
B: AsPrimitive<f64>,
{
value.as_().log(base.as_())
}
}
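// Semantics sketch: `log` uses the compile-time base while `log_with_base`
// takes it at runtime, so for the TwoBase instantiation log(8) == 3.0, and
// log_with_base(10, 100) == 2.0 for any T.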
impl<T: Base> Function for GenericLogFunction<T> {
fn name(&self) -> &str {
&*self.display_name
}
fn return_type(&self, args: &[&DataTypePtr]) -> Result<DataTypePtr> {
for arg in args {
assert_numeric(*arg)?;
}
Ok(f64::to_data_type())
}
fn eval(&self, columns: &ColumnsWithField, _input_rows: usize) -> Result<ColumnRef> {
let mut ctx = EvalContext::default();
if columns.len() == 1 {
with_match_primitive_type_id!(columns[0].data_type().data_type_id(), |$S| {
let unary = ScalarUnaryExpression::<$S, f64, _>::new(Self::log);
let col = unary.eval(columns[0].column(), &mut ctx)?;
Ok(Arc::new(col))
},{
unreachable!()
})
} else {
with_match_primitive_type_id!(columns[0].data_type().data_type_id(), |$S| {
with_match_primitive_type_id!(columns[1].data_type().data_type_id(), |$T| {
let binary = ScalarBinaryExpression::<$S, $T, f64, _>::new(Self::log_with_base);
let col = binary.eval(columns[0].column(), columns[1].column(), &mut ctx)?;
Ok(Arc::new(col))
},{
unreachable!()
})
},{
unreachable!()
})
}
}
}
impl<T: Base> fmt::Display for GenericLogFunction<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.display_name.to_uppercase())
}
}
pub type LnFunction = GenericLogFunction<EBase>;
pub type LogFunction = GenericLogFunction<EBase>;
pub type Log10Function = GenericLogFunction<TenBase>;
pub type Log2Function = GenericLogFunction<TwoBase>; | use crate::scalars::ScalarUnaryExpression; |
tcr4.rs | #[doc = r" Value read from the register"]
pub struct R {
bits: u32,
}
#[doc = r" Value to write to the register"]
pub struct W {
bits: u32,
}
impl super::TCR4 {
#[doc = r" Modifies the contents of the register"]
#[inline]
pub fn modify<F>(&self, f: F)
where
for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
{
let bits = self.register.get();
let r = R { bits: bits };
let mut w = W { bits: bits };
f(&r, &mut w);
self.register.set(w.bits);
}
#[doc = r" Reads the contents of the register"]
#[inline]
pub fn read(&self) -> R {
R {
bits: self.register.get(),
}
}
#[doc = r" Writes to the register"]
#[inline]
pub fn write<F>(&self, f: F)
where
F: FnOnce(&mut W) -> &mut W,
{
let mut w = W::reset_value();
f(&mut w);
self.register.set(w.bits);
}
#[doc = r" Writes the reset value to the register"]
#[inline]
pub fn reset(&self) {
self.write(|w| w)
}
}
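// Typical read-modify-write usage, as a sketch (`sai` stands for whatever
// peripheral instance owns this TCR4 register):
//
// sai.tcr4.modify(|_, w| w.mf()._1().fse()._1().fsd()._1());
//
// `modify` reads the current value, lets the closure flip individual fields,
// and writes the result back, leaving all other fields untouched.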
#[doc = "Possible values of the field `FSD`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum FSDR {
#[doc = "Frame sync is generated externally in Slave mode."]
_0,
#[doc = "Frame sync is generated internally in Master mode."]
_1,
}
impl FSDR {
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
match *self {
FSDR::_0 => false,
FSDR::_1 => true,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: bool) -> FSDR {
match value {
false => FSDR::_0,
true => FSDR::_1,
}
}
#[doc = "Checks if the value of the field is `_0`"]
#[inline]
pub fn is_0(&self) -> bool {
*self == FSDR::_0
}
#[doc = "Checks if the value of the field is `_1`"]
#[inline]
pub fn is_1(&self) -> bool |
}
#[doc = "Possible values of the field `FSP`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum FSPR {
#[doc = "Frame sync is active high."]
_0,
#[doc = "Frame sync is active low."]
_1,
}
impl FSPR {
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
match *self {
FSPR::_0 => false,
FSPR::_1 => true,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: bool) -> FSPR {
match value {
false => FSPR::_0,
true => FSPR::_1,
}
}
#[doc = "Checks if the value of the field is `_0`"]
#[inline]
pub fn is_0(&self) -> bool {
*self == FSPR::_0
}
#[doc = "Checks if the value of the field is `_1`"]
#[inline]
pub fn is_1(&self) -> bool {
*self == FSPR::_1
}
}
#[doc = "Possible values of the field `ONDEM`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ONDEMR {
#[doc = "Internal frame sync is generated continuously."]
_0,
#[doc = "Internal frame sync is generated when the FIFO warning flag is clear."]
_1,
}
impl ONDEMR {
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
match *self {
ONDEMR::_0 => false,
ONDEMR::_1 => true,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: bool) -> ONDEMR {
match value {
false => ONDEMR::_0,
true => ONDEMR::_1,
}
}
#[doc = "Checks if the value of the field is `_0`"]
#[inline]
pub fn is_0(&self) -> bool {
*self == ONDEMR::_0
}
#[doc = "Checks if the value of the field is `_1`"]
#[inline]
pub fn is_1(&self) -> bool {
*self == ONDEMR::_1
}
}
#[doc = "Possible values of the field `FSE`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum FSER {
#[doc = "Frame sync asserts with the first bit of the frame."]
_0,
#[doc = "Frame sync asserts one bit before the first bit of the frame."]
_1,
}
impl FSER {
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
match *self {
FSER::_0 => false,
FSER::_1 => true,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: bool) -> FSER {
match value {
false => FSER::_0,
true => FSER::_1,
}
}
#[doc = "Checks if the value of the field is `_0`"]
#[inline]
pub fn is_0(&self) -> bool {
*self == FSER::_0
}
#[doc = "Checks if the value of the field is `_1`"]
#[inline]
pub fn is_1(&self) -> bool {
*self == FSER::_1
}
}
#[doc = "Possible values of the field `MF`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum MFR {
#[doc = "LSB is transmitted first."]
_0,
#[doc = "MSB is transmitted first."]
_1,
}
impl MFR {
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
match *self {
MFR::_0 => false,
MFR::_1 => true,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: bool) -> MFR {
match value {
false => MFR::_0,
true => MFR::_1,
}
}
#[doc = "Checks if the value of the field is `_0`"]
#[inline]
pub fn is_0(&self) -> bool {
*self == MFR::_0
}
#[doc = "Checks if the value of the field is `_1`"]
#[inline]
pub fn is_1(&self) -> bool {
*self == MFR::_1
}
}
#[doc = r" Value of the field"]
pub struct SYWDR {
bits: u8,
}
impl SYWDR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = r" Value of the field"]
pub struct FRSZR {
bits: u8,
}
impl FRSZR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = "Possible values of the field `FPACK`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum FPACKR {
#[doc = "FIFO packing is disabled"]
_00,
#[doc = "8-bit FIFO packing is enabled"]
_10,
#[doc = "16-bit FIFO packing is enabled"]
_11,
#[doc = r" Reserved"]
_Reserved(u8),
}
impl FPACKR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bits(&self) -> u8 {
match *self {
FPACKR::_00 => 0,
FPACKR::_10 => 2,
FPACKR::_11 => 3,
FPACKR::_Reserved(bits) => bits,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: u8) -> FPACKR {
match value {
0 => FPACKR::_00,
2 => FPACKR::_10,
3 => FPACKR::_11,
i => FPACKR::_Reserved(i),
}
}
#[doc = "Checks if the value of the field is `_00`"]
#[inline]
pub fn is_00(&self) -> bool {
*self == FPACKR::_00
}
#[doc = "Checks if the value of the field is `_10`"]
#[inline]
pub fn is_10(&self) -> bool {
*self == FPACKR::_10
}
#[doc = "Checks if the value of the field is `_11`"]
#[inline]
pub fn is_11(&self) -> bool {
*self == FPACKR::_11
}
}
#[doc = "Possible values of the field `FCOMB`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum FCOMBR {
#[doc = "FIFO combine mode disabled."]
_00,
#[doc = "FIFO combine mode enabled on FIFO reads (from transmit shift registers)."]
_01,
#[doc = "FIFO combine mode enabled on FIFO writes (by software)."]
_10,
#[doc = "FIFO combine mode enabled on FIFO reads (from transmit shift registers) and writes (by software)."]
_11,
}
impl FCOMBR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bits(&self) -> u8 {
match *self {
FCOMBR::_00 => 0,
FCOMBR::_01 => 1,
FCOMBR::_10 => 2,
FCOMBR::_11 => 3,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: u8) -> FCOMBR {
match value {
0 => FCOMBR::_00,
1 => FCOMBR::_01,
2 => FCOMBR::_10,
3 => FCOMBR::_11,
_ => unreachable!(),
}
}
#[doc = "Checks if the value of the field is `_00`"]
#[inline]
pub fn is_00(&self) -> bool {
*self == FCOMBR::_00
}
#[doc = "Checks if the value of the field is `_01`"]
#[inline]
pub fn is_01(&self) -> bool {
*self == FCOMBR::_01
}
#[doc = "Checks if the value of the field is `_10`"]
#[inline]
pub fn is_10(&self) -> bool {
*self == FCOMBR::_10
}
#[doc = "Checks if the value of the field is `_11`"]
#[inline]
pub fn is_11(&self) -> bool {
*self == FCOMBR::_11
}
}
#[doc = "Possible values of the field `FCONT`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum FCONTR {
#[doc = "On FIFO error, the SAI will continue from the start of the next frame after the FIFO error flag has been cleared."]
_0,
#[doc = "On FIFO error, the SAI will continue from the same word that caused the FIFO error to set after the FIFO warning flag has been cleared."]
_1,
}
impl FCONTR {
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
match *self {
FCONTR::_0 => false,
FCONTR::_1 => true,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: bool) -> FCONTR {
match value {
false => FCONTR::_0,
true => FCONTR::_1,
}
}
#[doc = "Checks if the value of the field is `_0`"]
#[inline]
pub fn is_0(&self) -> bool {
*self == FCONTR::_0
}
#[doc = "Checks if the value of the field is `_1`"]
#[inline]
pub fn is_1(&self) -> bool {
*self == FCONTR::_1
}
}
#[doc = "Values that can be written to the field `FSD`"]
pub enum FSDW {
#[doc = "Frame sync is generated externally in Slave mode."]
_0,
#[doc = "Frame sync is generated internally in Master mode."]
_1,
}
impl FSDW {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> bool {
match *self {
FSDW::_0 => false,
FSDW::_1 => true,
}
}
}
#[doc = r" Proxy"]
pub struct _FSDW<'a> {
w: &'a mut W,
}
impl<'a> _FSDW<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: FSDW) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = "Frame sync is generated externally in Slave mode."]
#[inline]
pub fn _0(self) -> &'a mut W {
self.variant(FSDW::_0)
}
#[doc = "Frame sync is generated internally in Master mode."]
#[inline]
pub fn _1(self) -> &'a mut W {
self.variant(FSDW::_1)
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 0;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `FSP`"]
pub enum FSPW {
#[doc = "Frame sync is active high."]
_0,
#[doc = "Frame sync is active low."]
_1,
}
impl FSPW {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> bool {
match *self {
FSPW::_0 => false,
FSPW::_1 => true,
}
}
}
#[doc = r" Proxy"]
pub struct _FSPW<'a> {
w: &'a mut W,
}
impl<'a> _FSPW<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: FSPW) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = "Frame sync is active high."]
#[inline]
pub fn _0(self) -> &'a mut W {
self.variant(FSPW::_0)
}
#[doc = "Frame sync is active low."]
#[inline]
pub fn _1(self) -> &'a mut W {
self.variant(FSPW::_1)
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 1;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `ONDEM`"]
pub enum ONDEMW {
#[doc = "Internal frame sync is generated continuously."]
_0,
#[doc = "Internal frame sync is generated when the FIFO warning flag is clear."]
_1,
}
impl ONDEMW {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> bool {
match *self {
ONDEMW::_0 => false,
ONDEMW::_1 => true,
}
}
}
#[doc = r" Proxy"]
pub struct _ONDEMW<'a> {
w: &'a mut W,
}
impl<'a> _ONDEMW<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: ONDEMW) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = "Internal frame sync is generated continuously."]
#[inline]
pub fn _0(self) -> &'a mut W {
self.variant(ONDEMW::_0)
}
#[doc = "Internal frame sync is generated when the FIFO warning flag is clear."]
#[inline]
pub fn _1(self) -> &'a mut W {
self.variant(ONDEMW::_1)
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 2;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `FSE`"]
pub enum FSEW {
#[doc = "Frame sync asserts with the first bit of the frame."]
_0,
#[doc = "Frame sync asserts one bit before the first bit of the frame."]
_1,
}
impl FSEW {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> bool {
match *self {
FSEW::_0 => false,
FSEW::_1 => true,
}
}
}
#[doc = r" Proxy"]
pub struct _FSEW<'a> {
w: &'a mut W,
}
impl<'a> _FSEW<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: FSEW) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = "Frame sync asserts with the first bit of the frame."]
#[inline]
pub fn _0(self) -> &'a mut W {
self.variant(FSEW::_0)
}
#[doc = "Frame sync asserts one bit before the first bit of the frame."]
#[inline]
pub fn _1(self) -> &'a mut W {
self.variant(FSEW::_1)
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 3;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `MF`"]
pub enum MFW {
#[doc = "LSB is transmitted first."]
_0,
#[doc = "MSB is transmitted first."]
_1,
}
impl MFW {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> bool {
match *self {
MFW::_0 => false,
MFW::_1 => true,
}
}
}
#[doc = r" Proxy"]
pub struct _MFW<'a> {
w: &'a mut W,
}
impl<'a> _MFW<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: MFW) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = "LSB is transmitted first."]
#[inline]
pub fn _0(self) -> &'a mut W {
self.variant(MFW::_0)
}
#[doc = "MSB is transmitted first."]
#[inline]
pub fn _1(self) -> &'a mut W {
self.variant(MFW::_1)
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 4;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _SYWDW<'a> {
w: &'a mut W,
}
impl<'a> _SYWDW<'a> {
#[doc = r" Writes raw bits to the field"]
#[inline]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
const MASK: u8 = 31;
const OFFSET: u8 = 8;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FRSZW<'a> {
w: &'a mut W,
}
impl<'a> _FRSZW<'a> {
#[doc = r" Writes raw bits to the field"]
#[inline]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
const MASK: u8 = 31;
const OFFSET: u8 = 16;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `FPACK`"]
pub enum FPACKW {
#[doc = "FIFO packing is disabled"]
_00,
#[doc = "8-bit FIFO packing is enabled"]
_10,
#[doc = "16-bit FIFO packing is enabled"]
_11,
}
impl FPACKW {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> u8 {
match *self {
FPACKW::_00 => 0,
FPACKW::_10 => 2,
FPACKW::_11 => 3,
}
}
}
#[doc = r" Proxy"]
pub struct _FPACKW<'a> {
w: &'a mut W,
}
impl<'a> _FPACKW<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: FPACKW) -> &'a mut W {
unsafe { self.bits(variant._bits()) }
}
#[doc = "FIFO packing is disabled"]
#[inline]
pub fn _00(self) -> &'a mut W {
self.variant(FPACKW::_00)
}
#[doc = "8-bit FIFO packing is enabled"]
#[inline]
pub fn _10(self) -> &'a mut W {
self.variant(FPACKW::_10)
}
#[doc = "16-bit FIFO packing is enabled"]
#[inline]
pub fn _11(self) -> &'a mut W {
self.variant(FPACKW::_11)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
const MASK: u8 = 3;
const OFFSET: u8 = 24;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `FCOMB`"]
pub enum FCOMBW {
#[doc = "FIFO combine mode disabled."]
_00,
#[doc = "FIFO combine mode enabled on FIFO reads (from transmit shift registers)."]
_01,
#[doc = "FIFO combine mode enabled on FIFO writes (by software)."]
_10,
#[doc = "FIFO combine mode enabled on FIFO reads (from transmit shift registers) and writes (by software)."]
_11,
}
impl FCOMBW {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> u8 {
match *self {
FCOMBW::_00 => 0,
FCOMBW::_01 => 1,
FCOMBW::_10 => 2,
FCOMBW::_11 => 3,
}
}
}
#[doc = r" Proxy"]
pub struct _FCOMBW<'a> {
w: &'a mut W,
}
impl<'a> _FCOMBW<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: FCOMBW) -> &'a mut W {
{
self.bits(variant._bits())
}
}
#[doc = "FIFO combine mode disabled."]
#[inline]
pub fn _00(self) -> &'a mut W {
self.variant(FCOMBW::_00)
}
#[doc = "FIFO combine mode enabled on FIFO reads (from transmit shift registers)."]
#[inline]
pub fn _01(self) -> &'a mut W {
self.variant(FCOMBW::_01)
}
#[doc = "FIFO combine mode enabled on FIFO writes (by software)."]
#[inline]
pub fn _10(self) -> &'a mut W {
self.variant(FCOMBW::_10)
}
#[doc = "FIFO combine mode enabled on FIFO reads (from transmit shift registers) and writes (by software)."]
#[inline]
pub fn _11(self) -> &'a mut W {
self.variant(FCOMBW::_11)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bits(self, value: u8) -> &'a mut W {
const MASK: u8 = 3;
const OFFSET: u8 = 26;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `FCONT`"]
pub enum FCONTW {
#[doc = "On FIFO error, the SAI will continue from the start of the next frame after the FIFO error flag has been cleared."]
_0,
#[doc = "On FIFO error, the SAI will continue from the same word that caused the FIFO error to set after the FIFO warning flag has been cleared."]
_1,
}
impl FCONTW {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> bool {
match *self {
FCONTW::_0 => false,
FCONTW::_1 => true,
}
}
}
#[doc = r" Proxy"]
pub struct _FCONTW<'a> {
w: &'a mut W,
}
impl<'a> _FCONTW<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: FCONTW) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = "On FIFO error, the SAI will continue from the start of the next frame after the FIFO error flag has been cleared."]
#[inline]
pub fn _0(self) -> &'a mut W {
self.variant(FCONTW::_0)
}
#[doc = "On FIFO error, the SAI will continue from the same word that caused the FIFO error to set after the FIFO warning flag has been cleared."]
#[inline]
pub fn _1(self) -> &'a mut W {
self.variant(FCONTW::_1)
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 28;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
impl R {
#[doc = r" Value of the register as raw bits"]
#[inline]
pub fn bits(&self) -> u32 {
self.bits
}
#[doc = "Bit 0 - Frame Sync Direction"]
#[inline]
pub fn fsd(&self) -> FSDR {
FSDR::_from({
const MASK: bool = true;
const OFFSET: u8 = 0;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 1 - Frame Sync Polarity"]
#[inline]
pub fn fsp(&self) -> FSPR {
FSPR::_from({
const MASK: bool = true;
const OFFSET: u8 = 1;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 2 - On Demand Mode"]
#[inline]
pub fn ondem(&self) -> ONDEMR {
ONDEMR::_from({
const MASK: bool = true;
const OFFSET: u8 = 2;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 3 - Frame Sync Early"]
#[inline]
pub fn fse(&self) -> FSER {
FSER::_from({
const MASK: bool = true;
const OFFSET: u8 = 3;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 4 - MSB First"]
#[inline]
pub fn mf(&self) -> MFR {
MFR::_from({
const MASK: bool = true;
const OFFSET: u8 = 4;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bits 8:12 - Sync Width"]
#[inline]
pub fn sywd(&self) -> SYWDR {
let bits = {
const MASK: u8 = 31;
const OFFSET: u8 = 8;
((self.bits >> OFFSET) & MASK as u32) as u8
};
SYWDR { bits }
}
#[doc = "Bits 16:20 - Frame size"]
#[inline]
pub fn frsz(&self) -> FRSZR {
let bits = {
const MASK: u8 = 31;
const OFFSET: u8 = 16;
((self.bits >> OFFSET) & MASK as u32) as u8
};
FRSZR { bits }
}
#[doc = "Bits 24:25 - FIFO Packing Mode"]
#[inline]
pub fn fpack(&self) -> FPACKR {
FPACKR::_from({
const MASK: u8 = 3;
const OFFSET: u8 = 24;
((self.bits >> OFFSET) & MASK as u32) as u8
})
}
#[doc = "Bits 26:27 - FIFO Combine Mode"]
#[inline]
pub fn fcomb(&self) -> FCOMBR {
FCOMBR::_from({
const MASK: u8 = 3;
const OFFSET: u8 = 26;
((self.bits >> OFFSET) & MASK as u32) as u8
})
}
#[doc = "Bit 28 - FIFO Continue on Error"]
#[inline]
pub fn fcont(&self) -> FCONTR {
FCONTR::_from({
const MASK: bool = true;
const OFFSET: u8 = 28;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
}
impl W {
#[doc = r" Reset value of the register"]
#[inline]
pub fn reset_value() -> W {
W { bits: 0 }
}
#[doc = r" Writes raw bits to the register"]
#[inline]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.bits = bits;
self
}
#[doc = "Bit 0 - Frame Sync Direction"]
#[inline]
pub fn fsd(&mut self) -> _FSDW {
_FSDW { w: self }
}
#[doc = "Bit 1 - Frame Sync Polarity"]
#[inline]
pub fn fsp(&mut self) -> _FSPW {
_FSPW { w: self }
}
#[doc = "Bit 2 - On Demand Mode"]
#[inline]
pub fn ondem(&mut self) -> _ONDEMW {
_ONDEMW { w: self }
}
#[doc = "Bit 3 - Frame Sync Early"]
#[inline]
pub fn fse(&mut self) -> _FSEW {
_FSEW { w: self }
}
#[doc = "Bit 4 - MSB First"]
#[inline]
pub fn mf(&mut self) -> _MFW {
_MFW { w: self }
}
#[doc = "Bits 8:12 - Sync Width"]
#[inline]
pub fn sywd(&mut self) -> _SYWDW {
_SYWDW { w: self }
}
#[doc = "Bits 16:20 - Frame size"]
#[inline]
pub fn frsz(&mut self) -> _FRSZW {
_FRSZW { w: self }
}
#[doc = "Bits 24:25 - FIFO Packing Mode"]
#[inline]
pub fn fpack(&mut self) -> _FPACKW {
_FPACKW { w: self }
}
#[doc = "Bits 26:27 - FIFO Combine Mode"]
#[inline]
pub fn fcomb(&mut self) -> _FCOMBW {
_FCOMBW { w: self }
}
#[doc = "Bit 28 - FIFO Continue on Error"]
#[inline]
pub fn fcont(&mut self) -> _FCONTW {
_FCONTW { w: self }
}
}
| {
*self == FSDR::_1
} |
user.js | const db = require('../db');
const { updateToken } = require('../middlewares/auth');
const login = async (ctx) => { | if (result.length) {
let token = updateToken(ctx, { uid: 1 });
ctx.body = {
code: 200,
data: {
uid: 1,
username: 'admin',
token
}
};
} else {
ctx.throw(401, 'Incorrect username or password!');
}
} catch (err) {
ctx.throw(500, err.message);
}
}
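// Usage sketch (assuming this handler is mounted on a POST /login route by a
// koa-router elsewhere): a matching user record answers
// { code: 200, data: { uid, username, token } }; a miss answers 401.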
module.exports = {
login
} | try {
const { username, password } = ctx.request.body;
let result = await db.users.find({ username, password }); |
continuous.go | package server
import (
"context"
"fmt"
"strings"
"time"
"github.com/mimecast/dtail/internal/clients"
"github.com/mimecast/dtail/internal/config"
"github.com/mimecast/dtail/internal/io/logger"
"github.com/mimecast/dtail/internal/omode"
gossh "golang.org/x/crypto/ssh"
)
type continuous struct {
}
func | () *continuous {
return &continuous{}
}
func (c *continuous) start(ctx context.Context) {
logger.Info("Starting continuous job runner after 10s")
time.Sleep(time.Second * 10)
c.runJobs(ctx)
}
func (c *continuous) runJobs(ctx context.Context) {
for _, job := range config.Server.Continuous {
if !job.Enable {
logger.Debug(job.Name, "Not running job as not enabled")
continue
}
go func(job config.Continuous) {
c.runJob(ctx, job)
for {
select {
// Retry after a minute
case <-time.After(time.Minute):
c.runJob(ctx, job)
case <-ctx.Done():
return
}
}
}(job)
}
}
func (c *continuous) runJob(ctx context.Context, job config.Continuous) {
logger.Debug(job.Name, "Processing job")
files := fillDates(job.Files)
outfile := fillDates(job.Outfile)
servers := strings.Join(job.Servers, ",")
if servers == "" {
servers = config.Server.SSHBindAddress
}
args := clients.Args{
ConnectionsPerCPU: 10,
Discovery: job.Discovery,
ServersStr: servers,
What: files,
Mode: omode.TailClient,
UserName: config.ContinuousUser,
}
args.SSHAuthMethods = append(args.SSHAuthMethods, gossh.Password(job.Name))
query := fmt.Sprintf("%s outfile %s", job.Query, outfile)
client, err := clients.NewMaprClient(args, query, clients.NonCumulativeMode)
if err != nil {
logger.Error(fmt.Sprintf("Unable to create job %s", job.Name), err)
return
}
jobCtx, cancel := context.WithCancel(ctx)
defer cancel()
if job.RestartOnDayChange {
go func() {
if c.waitForDayChange(ctx) {
logger.Info(fmt.Sprintf("Canceling job %s due to day change", job.Name))
cancel()
}
}()
}
logger.Info(fmt.Sprintf("Starting job %s", job.Name))
status := client.Start(jobCtx, make(chan string))
logMessage := fmt.Sprintf("Job exited with status %d", status)
if status != 0 {
logger.Warn(logMessage)
return
}
logger.Info(logMessage)
}
func (c *continuous) waitForDayChange(ctx context.Context) bool {
startTime := time.Now()
for {
select {
case <-time.After(time.Second):
if time.Now().Day() != startTime.Day() {
return true
}
case <-ctx.Done():
return false
}
}
}
money.go | // Copyright 2018 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package money
import (
"errors"
pb "github.com/tiennampham23/microservices-demo/src/frontend/genproto"
)
const (
nanosMin = -999999999
nanosMax = +999999999
nanosMod = 1000000000
)
var (
ErrInvalidValue = errors.New("one of the specified money values is invalid")
ErrMismatchingCurrency = errors.New("mismatching currency codes")
)
// IsValid checks if the specified value has valid units/nanos signs and ranges.
func IsValid(m pb.Money) bool {
return signMatches(m) && validNanos(m.GetNanos())
}
func signMatches(m pb.Money) bool {
return m.GetNanos() == 0 || m.GetUnits() == 0 || (m.GetNanos() < 0) == (m.GetUnits() < 0)
}
func validNanos(nanos int32) bool { return nanosMin <= nanos && nanos <= nanosMax }
// IsZero returns true if the specified money value is equal to zero.
func IsZero(m pb.Money) bool { return m.GetUnits() == 0 && m.GetNanos() == 0 }
// IsPositive returns true if the specified money value is valid and is
// positive.
func IsPositive(m pb.Money) bool {
	return IsValid(m) && (m.GetUnits() > 0 || (m.GetUnits() == 0 && m.GetNanos() > 0))
}
// IsNegative returns true if the specified money value is valid and is
// negative.
func IsNegative(m pb.Money) bool {
	return IsValid(m) && (m.GetUnits() < 0 || (m.GetUnits() == 0 && m.GetNanos() < 0))
}
// AreSameCurrency returns true if values l and r have the same, non-empty
// currency code.
func AreSameCurrency(l, r pb.Money) bool {
return l.GetCurrencyCode() == r.GetCurrencyCode() && l.GetCurrencyCode() != ""
}
// AreEquals returns true if values l and r are equal, including the
// currency. This does not check validity of the provided values.
func AreEquals(l, r pb.Money) bool {
return l.GetCurrencyCode() == r.GetCurrencyCode() &&
l.GetUnits() == r.GetUnits() && l.GetNanos() == r.GetNanos()
}
// Negate returns the same amount with the sign negated.
func Negate(m pb.Money) pb.Money {
return pb.Money{
Units: -m.GetUnits(),
Nanos: -m.GetNanos(),
CurrencyCode: m.GetCurrencyCode()}
}
// Must panics if the given error is not nil. This can be used with other
// functions like: "m := Must(Sum(a,b))".
func Must(v pb.Money, err error) pb.Money {
if err != nil {
panic(err)
}
return v
}
// Sum adds two values. Returns an error if one of the values are invalid or
// currency codes are not matching (unless currency code is unspecified for
// both).
func Sum(l, r pb.Money) (pb.Money, error) {
if !IsValid(l) || !IsValid(r) {
return pb.Money{}, ErrInvalidValue
} else if l.GetCurrencyCode() != r.GetCurrencyCode() {
return pb.Money{}, ErrMismatchingCurrency
}
units := l.GetUnits() + r.GetUnits()
nanos := l.GetNanos() + r.GetNanos()
if (units == 0 && nanos == 0) || (units > 0 && nanos >= 0) || (units < 0 && nanos <= 0) {
// same sign <units, nanos>
units += int64(nanos / nanosMod)
nanos = nanos % nanosMod
} else {
// different sign. nanos guaranteed not to go over the limit
if units > 0 {
units--
nanos += nanosMod
} else {
units++
nanos -= nanosMod
}
}
return pb.Money{
Units: units,
Nanos: nanos,
CurrencyCode: l.GetCurrencyCode()}, nil
}
// MultiplySlow is a slow multiplication operation done through adding the value
// to itself n-1 times.
func MultiplySlow(m pb.Money, n uint32) pb.Money {
out := m
for n > 1 {
out = Must(Sum(out, m))
n--
}
return out
} |
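A minimal sketch of Sum's carry behavior (the amounts are made up, and the money package import path is an assumption inferred from the genproto path above):
package main
import (
	"fmt"
	pb "github.com/tiennampham23/microservices-demo/src/frontend/genproto"
	"github.com/tiennampham23/microservices-demo/src/frontend/money"
)
func main() {
	a := pb.Money{CurrencyCode: "USD", Units: 1, Nanos: 900000000} // 1.9 USD
	b := pb.Money{CurrencyCode: "USD", Units: 0, Nanos: 200000000} // 0.2 USD
	// 900000000 + 200000000 nanos exceeds nanosMod, so one whole unit is
	// carried over: the result is units=2, nanos=100000000 (2.1 USD).
	sum := money.Must(money.Sum(a, b))
	fmt.Printf("%d units, %d nanos\n", sum.GetUnits(), sum.GetNanos())
}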
vue-resize.esm.js | function getInternetExplorerVersion() {
var ua = window.navigator.userAgent;
var msie = ua.indexOf('MSIE ');
if (msie > 0) {
// IE 10 or older => return version number
return parseInt(ua.substring(msie + 5, ua.indexOf('.', msie)), 10);
}
var trident = ua.indexOf('Trident/');
if (trident > 0) {
// IE 11 => return version number
var rv = ua.indexOf('rv:');
return parseInt(ua.substring(rv + 3, ua.indexOf('.', rv)), 10);
}
var edge = ua.indexOf('Edge/');
if (edge > 0) {
// Edge (IE 12+) => return version number
return parseInt(ua.substring(edge + 5, ua.indexOf('.', edge)), 10);
} // other browser
return -1;
}
//
var isIE;
function initCompat() {
if (!initCompat.init) {
initCompat.init = true;
isIE = getInternetExplorerVersion() !== -1;
}
}
var script = {
name: 'ResizeObserver',
props: {
emitOnMount: {
type: Boolean,
default: false
},
ignoreWidth: {
type: Boolean,
default: false
},
ignoreHeight: {
type: Boolean,
default: false
}
},
mounted: function mounted() {
var _this = this;
initCompat();
this.$nextTick(function () {
_this._w = _this.$el.offsetWidth;
_this._h = _this.$el.offsetHeight;
if (_this.emitOnMount) {
_this.emitSize();
}
});
var object = document.createElement('object');
this._resizeObject = object;
object.setAttribute('aria-hidden', 'true');
object.setAttribute('tabindex', -1);
object.onload = this.addResizeHandlers;
object.type = 'text/html';
if (isIE) {
this.$el.appendChild(object);
}
object.data = 'about:blank';
if (!isIE) {
this.$el.appendChild(object);
}
},
beforeDestroy: function beforeDestroy() {
this.removeResizeHandlers();
},
methods: {
compareAndNotify: function compareAndNotify() {
if (!this.ignoreWidth && this._w !== this.$el.offsetWidth || !this.ignoreHeight && this._h !== this.$el.offsetHeight) {
this._w = this.$el.offsetWidth;
this._h = this.$el.offsetHeight;
this.emitSize();
}
},
emitSize: function emitSize() {
this.$emit('notify', {
width: this._w,
height: this._h
});
},
addResizeHandlers: function addResizeHandlers() {
this._resizeObject.contentDocument.defaultView.addEventListener('resize', this.compareAndNotify);
this.compareAndNotify();
},
removeResizeHandlers: function removeResizeHandlers() {
if (this._resizeObject && this._resizeObject.onload) {
if (!isIE && this._resizeObject.contentDocument) {
this._resizeObject.contentDocument.defaultView.removeEventListener('resize', this.compareAndNotify);
}
this.$el.removeChild(this._resizeObject);
this._resizeObject.onload = null;
this._resizeObject = null;
}
}
}
};
function normalizeComponent(template, style, script, scopeId, isFunctionalTemplate, moduleIdentifier
/* server only */
, shadowMode, createInjector, createInjectorSSR, createInjectorShadow) {
if (typeof shadowMode !== 'boolean') {
createInjectorSSR = createInjector;
createInjector = shadowMode;
shadowMode = false;
} // Vue.extend constructor export interop.
var options = typeof script === 'function' ? script.options : script; // render functions
if (template && template.render) {
options.render = template.render;
options.staticRenderFns = template.staticRenderFns;
options._compiled = true; // functional template
if (isFunctionalTemplate) {
options.functional = true;
}
} // scopedId
if (scopeId) {
options._scopeId = scopeId;
}
var hook;
if (moduleIdentifier) {
// server build
hook = function hook(context) {
// 2.3 injection
context = context || // cached call
this.$vnode && this.$vnode.ssrContext || // stateful
this.parent && this.parent.$vnode && this.parent.$vnode.ssrContext; // functional
// 2.2 with runInNewContext: true
if (!context && typeof __VUE_SSR_CONTEXT__ !== 'undefined') {
context = __VUE_SSR_CONTEXT__;
} // inject component styles
if (style) {
style.call(this, createInjectorSSR(context));
} // register component module identifier for async chunk inference
if (context && context._registeredComponents) {
context._registeredComponents.add(moduleIdentifier);
}
}; // used by ssr in case component is cached and beforeCreate
// never gets called
options._ssrRegister = hook;
} else if (style) {
hook = shadowMode ? function (context) {
style.call(this, createInjectorShadow(context, this.$root.$options.shadowRoot));
} : function (context) {
style.call(this, createInjector(context));
};
}
if (hook) {
if (options.functional) {
// register for functional component in vue file
var originalRender = options.render;
options.render = function renderWithStyleInjection(h, context) {
hook.call(context);
return originalRender(h, context);
};
} else {
// inject component registration as beforeCreate hook
var existing = options.beforeCreate;
options.beforeCreate = existing ? [].concat(existing, hook) : [hook];
}
}
return script;
}
/* script */
var __vue_script__ = script;
/* template */
var __vue_render__ = function __vue_render__() {
var _vm = this;
var _h = _vm.$createElement;
var _c = _vm._self._c || _h;
return _c("div", {
staticClass: "resize-observer",
attrs: {
tabindex: "-1"
}
});
};
var __vue_staticRenderFns__ = [];
__vue_render__._withStripped = true;
/* style */
var __vue_inject_styles__ = undefined;
/* scoped */
var __vue_scope_id__ = "data-v-8859cc6c";
/* module identifier */
var __vue_module_identifier__ = undefined;
/* functional template */
var __vue_is_functional_template__ = false;
/* style inject */
/* style inject SSR */
/* style inject shadow dom */
var __vue_component__ = /*#__PURE__*/normalizeComponent({
render: __vue_render__,
staticRenderFns: __vue_staticRenderFns__
}, __vue_inject_styles__, __vue_script__, __vue_scope_id__, __vue_is_functional_template__, __vue_module_identifier__, false, undefined, undefined, undefined);
function install(Vue) {
// eslint-disable-next-line vue/component-definition-name-casing
Vue.component('resize-observer', __vue_component__);
Vue.component('ResizeObserver', __vue_component__);
}
var plugin = {
// eslint-disable-next-line no-undef
version: "1.0.1",
install: install
};
var GlobalVue = null;
if (typeof window !== 'undefined') {
GlobalVue = window.Vue;
} else if (typeof global !== 'undefined') {
GlobalVue = global.Vue;
}
if (GlobalVue) {
GlobalVue.use(plugin);
}
export default plugin;
export { __vue_component__ as ResizeObserver, install };
//# sourceMappingURL=vue-resize.esm.js.map
10-strerror.js | var setup = require('./setup'),
assert = require('assert');
setup(function(err, cb) {
assert(!err, "setup failure");
    // Make sure an invalid errorCode doesn't crash anything
    cb.strError(1000);
// Make sure we can get error strings properly
assert( cb.strError(0) == 'Success', 'Error strings are being returned incorrectly' );
process.exit(0);
})
response.rs | use serde::{Deserialize, Serialize};
#[derive(Debug, Serialize, Deserialize)]
pub struct Response {
    pub country: String,
    pub sex: bool,
}
Animation.ts | type KeyFrames = { step: string, translate: string }[];
export class Animation {
private static keyFrames: KeyFrames = [
{
step: '0%',
translate: '-10%,10%',
},
{
step: '10%',
translate: '-25%,0%',
},
{
step: '20%',
translate: '-30%,10%',
},
{
step: '30%',
translate: '-30%,30%',
},
{
step: '40%',
translate: '-20%,20%',
},
{
step: '50%',
translate: '-15%,10%',
},
{
step: '60%',
translate: '-20%,20%',
},
{
step: '70%',
translate: '-5%,20%',
},
{
step: '80%',
translate: '-25%,5%',
},
{
step: '90%',
translate: '-30%,25%',
},
{
step: '100%',
translate: '-10%,10%',
},
];
private static createKeyframes(frames: KeyFrames): string {
let steps = '';
frames.forEach(({ step, translate }) => {
steps += `${step} { transform: translate(${translate}); }`;
});
return `@keyframes grained {${steps}}`;
}
private static create(): void {
// Append a style element to the document
const style = document.createElement('style');
style.id = 'grained-animation';
style.dataset.grainedAnimation = '';
style.innerHTML = this.createKeyframes(this.keyFrames);
document.body.appendChild(style);
}
static add(): void {
const isExist = document.querySelector('[data-grained-animation]');
if (!isExist) {
this.create();
}
}
}
export default Animation;
tostring.go | // Copyright 2019 Nebularis Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package text
import (
"fmt"
"reflect"
"sort"
"strings"
)
func ToString(i interface{}) string {
var w Writer
w.W(i)
return w.String()
}
func ToReflectString(i interface{}) string {
m := make(map[interface{}]struct{})
var w Writer
writeInterface(&w, m, i)
return w.String()
}
func writeInterface(w *Writer, m map[interface{}]struct{}, iface interface{}) {
if iface == nil {
w.W("<nil>")
return
}
t := reflect.TypeOf(iface)
switch t.Kind() {
case reflect.Ptr:
if _, found := m[iface]; found {
w.W("...")
return
}
m[iface] = struct{}{}
if reflect.ValueOf(iface).IsNil() {
return
}
writeInterface(w, m, reflect.ValueOf(iface).Elem().Interface())
case reflect.String:
w.W(reflect.ValueOf(iface).String())
case reflect.Bool:
w.W(reflect.ValueOf(iface).Bool())
case reflect.Slice:
w.W("[]{").Indent()
needComma := false
v := reflect.ValueOf(iface)
for i := 0; i < v.Len(); i++ {
if needComma {
w.W(",")
}
needComma = true
w.Ln()
writeInterface(w, m, v.Index(i).Interface())
}
w.Dedent().Ln().W("}")
case reflect.Struct:
var names []string
for i := 0; i < t.NumField(); i++ {
f := t.Field(i)
names = append(names, f.Name)
}
sort.Strings(names)
w.W(t.Name()).W(" {").Indent()
needComma := false
for _, name := range names {
if strings.ToLower(string(name[0])) == string(name[0]) {
continue
}
f, _ := t.FieldByName(name)
val := reflect.ValueOf(iface).FieldByName(name).Interface()
if val == nil || reflect.ValueOf(val).IsZero() {
continue
}
if needComma {
w.W(",")
}
needComma = true
w.Ln()
w.W(f.Name).W(": ")
writeInterface(w, m, val)
}
w.Dedent().Ln().W("}")
case reflect.Map:
keys := reflect.ValueOf(iface).MapKeys()
sort.SliceStable(keys, func(i, j int) bool {
return strings.Compare(keys[i].String(), keys[j].String()) < 0
})
w.W("{").Indent()
for j, k := range keys {
if j > 0 {
w.W(",")
}
w.Ln()
w.W(k.String()).W(": ")
writeInterface(w, m, reflect.ValueOf(iface).MapIndex(k).Interface())
}
w.Dedent().Ln().W("}")
case reflect.Int32, reflect.Int:
w.W(reflect.ValueOf(iface).Int())
default:
panic(fmt.Sprintf("NYI: %v", t.Kind()))
}
}
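A minimal usage sketch of ToReflectString (Person/Pet are hypothetical types; the exact whitespace of the output depends on this package's Writer, which is defined elsewhere):
type Pet struct{ Name string }
type Person struct {
	Name string
	Age  int
	Pets []*Pet
}
func demo() string {
	p := &Person{Name: "Ada", Pets: []*Pet{{Name: "Rex"}}}
	// Zero-valued fields (Age here) and unexported fields are skipped,
	// and already-visited pointers are printed as "..." to break cycles.
	return ToReflectString(p)
}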
|
test_invocations.py | from jawa.constants import ConstantPool
from jawa.util.bytecode import Instruction
from pyjvm.core.actions import Pop, Invoke
from pyjvm.core.class_loaders import FixedClassLoader
from pyjvm.core.jvm_class import JvmClass, BytecodeMethod, MethodKey
from pyjvm.core.jvm_types import Integer, RootObjectType
from test.utils import constant_instruction, assert_instruction, SOME_INT
def test_invoke_v():
    method_name = 'method_name'
class_name = 'class_name'
consts = ConstantPool()
descriptor = '(II)V'
key = MethodKey(method_name, descriptor)
no_op = Instruction.create('nop')
method = BytecodeMethod(
        name=method_name,
        descriptor=descriptor,
max_locals=5,
max_stack=5,
instructions=[no_op, no_op],
args=[Integer, Integer],
)
jvm_class = JvmClass(
class_name,
RootObjectType.refers_to,
consts,
methods={
key: method
}
)
method_ref = consts.create_method_ref(class_name, method_name, descriptor)
instruction = constant_instruction('invokevirtual', method_ref)
loader = FixedClassLoader({
class_name: jvm_class
})
instance = loader.default_instance(class_name)
arg_value = SOME_INT
arguments = [instance, arg_value, arg_value]
reversed_arguments = list(reversed(arguments))
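    # The operand stack holds the receiver and both int arguments in reverse
    # push order, so invokevirtual is expected to pop all three (Pop(3)) and
    # dispatch to the resolved method with the original argument order (Invoke).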
assert_instruction(
constants=consts,
loader=loader,
instruction=instruction,
op_stack=reversed_arguments,
expected=[
Pop(3),
Invoke(class_name, key, arguments)
]
) |
|
move_data.rs | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*!
Data structures used for tracking moves. Please see the extensive
comments in the section "Moves and initialization" and in `doc.rs`.
*/
use std::cell::RefCell;
use std::rc::Rc;
use std::uint;
use std::collections::{HashMap, HashSet};
use middle::borrowck::*;
use middle::dataflow::DataFlowContext;
use middle::dataflow::DataFlowOperator;
use euv = middle::expr_use_visitor;
use middle::ty;
use syntax::ast;
use syntax::ast_util;
use syntax::codemap::Span;
use util::ppaux::Repr;
pub struct MoveData {
/// Move paths. See section "Move paths" in `doc.rs`.
pub paths: RefCell<Vec<MovePath>>,
/// Cache of loan path to move path index, for easy lookup.
pub path_map: RefCell<HashMap<Rc<LoanPath>, MovePathIndex>>,
/// Each move or uninitialized variable gets an entry here.
pub moves: RefCell<Vec<Move>>,
/// Assignments to a variable, like `x = foo`. These are assigned
/// bits for dataflow, since we must track them to ensure that
/// immutable variables are assigned at most once along each path.
pub var_assignments: RefCell<Vec<Assignment>>,
/// Assignments to a path, like `x.f = foo`. These are not
/// assigned dataflow bits, but we track them because they still
/// kill move bits.
pub path_assignments: RefCell<Vec<Assignment>>,
/// Assignments to a variable or path, like `x = foo`, but not `x += foo`.
pub assignee_ids: RefCell<HashSet<ast::NodeId>>,
}
pub struct FlowedMoveData<'a> {
pub move_data: MoveData,
pub dfcx_moves: MoveDataFlow<'a>,
// We could (and maybe should, for efficiency) combine both move
// and assign data flow into one, but this way it's easier to
// distinguish the bits that correspond to moves and assignments.
pub dfcx_assign: AssignDataFlow<'a>
}
/// Index into `MoveData.paths`, used like a pointer
#[deriving(PartialEq)]
pub struct MovePathIndex(uint);
impl MovePathIndex {
fn get(&self) -> uint {
let MovePathIndex(v) = *self; v
}
}
impl Clone for MovePathIndex {
fn clone(&self) -> MovePathIndex {
MovePathIndex(self.get())
}
}
static InvalidMovePathIndex: MovePathIndex =
MovePathIndex(uint::MAX);
/// Index into `MoveData.moves`, used like a pointer
#[deriving(PartialEq)]
pub struct MoveIndex(uint);
impl MoveIndex {
fn get(&self) -> uint {
let MoveIndex(v) = *self; v
}
}
static InvalidMoveIndex: MoveIndex =
MoveIndex(uint::MAX);
pub struct MovePath {
/// Loan path corresponding to this move path
pub loan_path: Rc<LoanPath>,
/// Parent pointer, `InvalidMovePathIndex` if root
pub parent: MovePathIndex,
/// Head of linked list of moves to this path,
/// `InvalidMoveIndex` if not moved
pub first_move: MoveIndex,
/// First node in linked list of children, `InvalidMovePathIndex` if leaf
pub first_child: MovePathIndex,
/// Next node in linked list of parent's children (siblings),
/// `InvalidMovePathIndex` if none.
pub next_sibling: MovePathIndex,
}
#[deriving(PartialEq)]
pub enum MoveKind {
Declared, // When declared, variables start out "moved".
MoveExpr, // Expression or binding that moves a variable
MovePat, // By-move binding
Captured // Closure creation that moves a value
}
pub struct Move {
/// Path being moved.
pub path: MovePathIndex,
/// id of node that is doing the move.
pub id: ast::NodeId,
/// Kind of move, for error messages.
pub kind: MoveKind,
/// Next node in linked list of moves from `path`, or `InvalidMoveIndex`
pub next_move: MoveIndex
}
pub struct Assignment {
/// Path being assigned.
pub path: MovePathIndex,
/// id where assignment occurs
pub id: ast::NodeId,
    /// span of node where assignment occurs
    pub span: Span,
}
#[deriving(Clone)]
pub struct MoveDataFlowOperator;
pub type MoveDataFlow<'a> = DataFlowContext<'a, MoveDataFlowOperator>;
#[deriving(Clone)]
pub struct AssignDataFlowOperator;
pub type AssignDataFlow<'a> = DataFlowContext<'a, AssignDataFlowOperator>;
impl MoveData {
pub fn new() -> MoveData {
MoveData {
paths: RefCell::new(Vec::new()),
path_map: RefCell::new(HashMap::new()),
moves: RefCell::new(Vec::new()),
path_assignments: RefCell::new(Vec::new()),
var_assignments: RefCell::new(Vec::new()),
assignee_ids: RefCell::new(HashSet::new()),
}
}
fn path_loan_path(&self, index: MovePathIndex) -> Rc<LoanPath> {
self.paths.borrow().get(index.get()).loan_path.clone()
}
fn path_parent(&self, index: MovePathIndex) -> MovePathIndex {
self.paths.borrow().get(index.get()).parent
}
fn path_first_move(&self, index: MovePathIndex) -> MoveIndex {
self.paths.borrow().get(index.get()).first_move
}
fn path_first_child(&self, index: MovePathIndex) -> MovePathIndex {
self.paths.borrow().get(index.get()).first_child
}
fn path_next_sibling(&self, index: MovePathIndex) -> MovePathIndex {
self.paths.borrow().get(index.get()).next_sibling
}
fn set_path_first_move(&self,
index: MovePathIndex,
first_move: MoveIndex) {
self.paths.borrow_mut().get_mut(index.get()).first_move = first_move
}
fn set_path_first_child(&self,
index: MovePathIndex,
first_child: MovePathIndex) {
self.paths.borrow_mut().get_mut(index.get()).first_child = first_child
}
fn move_next_move(&self, index: MoveIndex) -> MoveIndex {
//! Type safe indexing operator
self.moves.borrow().get(index.get()).next_move
}
fn is_var_path(&self, index: MovePathIndex) -> bool {
//! True if `index` refers to a variable
self.path_parent(index) == InvalidMovePathIndex
}
pub fn move_path(&self,
tcx: &ty::ctxt,
lp: Rc<LoanPath>) -> MovePathIndex {
/*!
* Returns the existing move path index for `lp`, if any,
* and otherwise adds a new index for `lp` and any of its
* base paths that do not yet have an index.
*/
match self.path_map.borrow().find(&lp) {
Some(&index) => {
return index;
}
None => {}
}
let index = match *lp {
LpVar(..) => {
let index = MovePathIndex(self.paths.borrow().len());
self.paths.borrow_mut().push(MovePath {
loan_path: lp.clone(),
parent: InvalidMovePathIndex,
first_move: InvalidMoveIndex,
first_child: InvalidMovePathIndex,
next_sibling: InvalidMovePathIndex,
});
index
}
LpExtend(ref base, _, _) => {
let parent_index = self.move_path(tcx, base.clone());
let index = MovePathIndex(self.paths.borrow().len());
let next_sibling = self.path_first_child(parent_index);
self.set_path_first_child(parent_index, index);
self.paths.borrow_mut().push(MovePath {
loan_path: lp.clone(),
parent: parent_index,
first_move: InvalidMoveIndex,
first_child: InvalidMovePathIndex,
next_sibling: next_sibling,
});
index
}
};
debug!("move_path(lp={}, index={:?})",
lp.repr(tcx),
index);
assert_eq!(index.get(), self.paths.borrow().len() - 1);
self.path_map.borrow_mut().insert(lp, index);
return index;
}
fn existing_move_path(&self, lp: &Rc<LoanPath>)
-> Option<MovePathIndex> {
self.path_map.borrow().find_copy(lp)
}
fn existing_base_paths(&self, lp: &Rc<LoanPath>)
-> Vec<MovePathIndex> {
let mut result = vec!();
self.add_existing_base_paths(lp, &mut result);
result
}
fn add_existing_base_paths(&self, lp: &Rc<LoanPath>,
result: &mut Vec<MovePathIndex>) {
/*!
* Adds any existing move path indices for `lp` and any base
* paths of `lp` to `result`, but does not add new move paths
*/
match self.path_map.borrow().find_copy(lp) {
Some(index) => {
self.each_base_path(index, |p| {
result.push(p);
true
});
}
None => {
match **lp {
LpVar(..) => { }
LpExtend(ref b, _, _) => {
self.add_existing_base_paths(b, result);
}
}
}
}
}
pub fn add_move(&self,
tcx: &ty::ctxt,
lp: Rc<LoanPath>,
id: ast::NodeId,
kind: MoveKind) {
/*!
* Adds a new move entry for a move of `lp` that occurs at
* location `id` with kind `kind`.
*/
debug!("add_move(lp={}, id={:?}, kind={:?})",
lp.repr(tcx),
id,
kind);
let path_index = self.move_path(tcx, lp);
let move_index = MoveIndex(self.moves.borrow().len());
let next_move = self.path_first_move(path_index);
self.set_path_first_move(path_index, move_index);
self.moves.borrow_mut().push(Move {
path: path_index,
id: id,
kind: kind,
next_move: next_move
});
}
pub fn add_assignment(&self,
tcx: &ty::ctxt,
lp: Rc<LoanPath>,
assign_id: ast::NodeId,
span: Span,
assignee_id: ast::NodeId,
mode: euv::MutateMode) {
/*!
* Adds a new record for an assignment to `lp` that occurs at
* location `id` with the given `span`.
*/
debug!("add_assignment(lp={}, assign_id={:?}, assignee_id={:?}",
lp.repr(tcx), assign_id, assignee_id);
let path_index = self.move_path(tcx, lp.clone());
match mode {
euv::Init | euv::JustWrite => {
self.assignee_ids.borrow_mut().insert(assignee_id);
}
euv::WriteAndRead => { }
}
let assignment = Assignment {
path: path_index,
id: assign_id,
span: span,
};
if self.is_var_path(path_index) {
debug!("add_assignment[var](lp={}, assignment={}, path_index={:?})",
lp.repr(tcx), self.var_assignments.borrow().len(), path_index);
self.var_assignments.borrow_mut().push(assignment);
} else {
debug!("add_assignment[path](lp={}, path_index={:?})",
lp.repr(tcx), path_index);
self.path_assignments.borrow_mut().push(assignment);
}
}
fn add_gen_kills(&self,
tcx: &ty::ctxt,
dfcx_moves: &mut MoveDataFlow,
dfcx_assign: &mut AssignDataFlow) {
/*!
* Adds the gen/kills for the various moves and
* assignments into the provided data flow contexts.
* Moves are generated by moves and killed by assignments and
* scoping. Assignments are generated by assignment to variables and
* killed by scoping. See `doc.rs` for more details.
*/
for (i, move) in self.moves.borrow().iter().enumerate() {
dfcx_moves.add_gen(move.id, i);
}
for (i, assignment) in self.var_assignments.borrow().iter().enumerate() {
dfcx_assign.add_gen(assignment.id, i);
self.kill_moves(assignment.path, assignment.id, dfcx_moves);
}
for assignment in self.path_assignments.borrow().iter() {
self.kill_moves(assignment.path, assignment.id, dfcx_moves);
}
// Kill all moves related to a variable `x` when it goes out
// of scope:
for path in self.paths.borrow().iter() {
match *path.loan_path {
LpVar(id) => {
let kill_id = tcx.region_maps.var_scope(id);
let path = *self.path_map.borrow().get(&path.loan_path);
self.kill_moves(path, kill_id, dfcx_moves);
}
LpExtend(..) => {}
}
}
// Kill all assignments when the variable goes out of scope:
for (assignment_index, assignment) in
self.var_assignments.borrow().iter().enumerate() {
match *self.path_loan_path(assignment.path) {
LpVar(id) => {
let kill_id = tcx.region_maps.var_scope(id);
dfcx_assign.add_kill(kill_id, assignment_index);
}
LpExtend(..) => {
tcx.sess.bug("var assignment for non var path");
}
}
}
}
fn each_base_path(&self, index: MovePathIndex, f: |MovePathIndex| -> bool)
-> bool {
let mut p = index;
while p != InvalidMovePathIndex {
if !f(p) {
return false;
}
p = self.path_parent(p);
}
return true;
}
fn each_extending_path(&self,
index: MovePathIndex,
f: |MovePathIndex| -> bool)
-> bool {
if !f(index) {
return false;
}
let mut p = self.path_first_child(index);
while p != InvalidMovePathIndex {
if !self.each_extending_path(p, |x| f(x)) {
return false;
}
p = self.path_next_sibling(p);
}
return true;
}
fn each_applicable_move(&self,
index0: MovePathIndex,
f: |MoveIndex| -> bool)
-> bool {
let mut ret = true;
self.each_extending_path(index0, |index| {
let mut p = self.path_first_move(index);
while p != InvalidMoveIndex {
if !f(p) {
ret = false;
break;
}
p = self.move_next_move(p);
}
ret
});
ret
}
fn kill_moves(&self,
path: MovePathIndex,
kill_id: ast::NodeId,
dfcx_moves: &mut MoveDataFlow) {
self.each_applicable_move(path, |move_index| {
dfcx_moves.add_kill(kill_id, move_index.get());
true
});
}
}
impl<'a> FlowedMoveData<'a> {
pub fn new(move_data: MoveData,
tcx: &'a ty::ctxt,
id_range: ast_util::IdRange,
body: &ast::Block)
-> FlowedMoveData<'a> {
let mut dfcx_moves =
DataFlowContext::new(tcx,
MoveDataFlowOperator,
id_range,
move_data.moves.borrow().len());
let mut dfcx_assign =
DataFlowContext::new(tcx,
AssignDataFlowOperator,
id_range,
move_data.var_assignments.borrow().len());
move_data.add_gen_kills(tcx, &mut dfcx_moves, &mut dfcx_assign);
dfcx_moves.propagate(body);
dfcx_assign.propagate(body);
FlowedMoveData {
move_data: move_data,
dfcx_moves: dfcx_moves,
dfcx_assign: dfcx_assign,
}
}
pub fn each_path_moved_by(&self,
id: ast::NodeId,
f: |&Move, &LoanPath| -> bool)
-> bool {
/*!
* Iterates through each path moved by `id`
*/
self.dfcx_moves.each_gen_bit_frozen(id, |index| {
let move = self.move_data.moves.borrow();
let move = move.get(index);
let moved_path = move.path;
f(move, &*self.move_data.path_loan_path(moved_path))
})
}
pub fn kind_of_move_of_path(&self,
id: ast::NodeId,
loan_path: &Rc<LoanPath>)
-> Option<MoveKind> {
//! Returns the kind of a move of `loan_path` by `id`, if one exists.
let mut ret = None;
for loan_path_index in self.move_data.path_map.borrow().find(&*loan_path).iter() {
self.dfcx_moves.each_gen_bit_frozen(id, |move_index| {
let move = self.move_data.moves.borrow();
let move = move.get(move_index);
if move.path == **loan_path_index {
ret = Some(move.kind);
false
} else {
true
}
});
}
ret
}
pub fn each_move_of(&self,
id: ast::NodeId,
loan_path: &Rc<LoanPath>,
f: |&Move, &LoanPath| -> bool)
-> bool {
/*!
* Iterates through each move of `loan_path` (or some base path
* of `loan_path`) that *may* have occurred on entry to `id` without
* an intervening assignment. In other words, any moves that
* would invalidate a reference to `loan_path` at location `id`.
*/
// Bad scenarios:
//
// 1. Move of `a.b.c`, use of `a.b.c`
// 2. Move of `a.b.c`, use of `a.b.c.d`
// 3. Move of `a.b.c`, use of `a` or `a.b`
//
// OK scenario:
//
// 4. move of `a.b.c`, use of `a.b.d`
let base_indices = self.move_data.existing_base_paths(loan_path);
if base_indices.is_empty() {
return true;
}
let opt_loan_path_index = self.move_data.existing_move_path(loan_path);
let mut ret = true;
self.dfcx_moves.each_bit_on_entry_frozen(id, |index| {
let move = self.move_data.moves.borrow();
let move = move.get(index);
let moved_path = move.path;
if base_indices.iter().any(|x| x == &moved_path) {
// Scenario 1 or 2: `loan_path` or some base path of
// `loan_path` was moved.
if !f(move, &*self.move_data.path_loan_path(moved_path)) {
ret = false;
}
} else {
for &loan_path_index in opt_loan_path_index.iter() {
let cont = self.move_data.each_base_path(moved_path, |p| {
if p == loan_path_index {
// Scenario 3: some extension of `loan_path`
// was moved
f(move, &*self.move_data.path_loan_path(moved_path))
} else {
true
}
});
if !cont { ret = false; break }
}
}
ret
})
}
pub fn is_assignee(&self,
id: ast::NodeId)
-> bool {
//! True if `id` is the id of the LHS of an assignment
self.move_data.assignee_ids.borrow().iter().any(|x| x == &id)
}
pub fn each_assignment_of(&self,
id: ast::NodeId,
loan_path: &Rc<LoanPath>,
f: |&Assignment| -> bool)
-> bool {
/*!
* Iterates through every assignment to `loan_path` that
* may have occurred on entry to `id`. `loan_path` must be
* a single variable.
*/
let loan_path_index = {
match self.move_data.existing_move_path(loan_path) {
Some(i) => i,
None => {
// if there were any assignments, it'd have an index
return true;
}
}
};
self.dfcx_assign.each_bit_on_entry_frozen(id, |index| {
let assignment = self.move_data.var_assignments.borrow();
let assignment = assignment.get(index);
if assignment.path == loan_path_index && !f(assignment) {
false
} else {
true
}
})
}
}
impl DataFlowOperator for MoveDataFlowOperator {
#[inline]
fn initial_value(&self) -> bool {
false // no loans in scope by default
}
#[inline]
fn join(&self, succ: uint, pred: uint) -> uint {
succ | pred // moves from both preds are in scope
}
}
impl DataFlowOperator for AssignDataFlowOperator {
#[inline]
fn initial_value(&self) -> bool {
false // no assignments in scope by default
}
#[inline]
fn join(&self, succ: uint, pred: uint) -> uint {
succ | pred // moves from both preds are in scope
}
}
getItems.js | /* eslint-disable max-len */
const Apify = require('apify');
const { log } = Apify.utils;
const { LABEL } = require('./consts');
async function getItems(pageObj, pageData, resultsArr, label) {
if (label === LABEL.NEW) {
// need to wait after page 1
await pageObj.waitForSelector('.zg-grid-general-faceout');
        for (let i = 1; i < 5; i++) {
            await Apify.utils.puppeteer.infiniteScroll(pageObj, { scrollDownAndUp: true, timeoutSecs: i, waitForSecs: 5 });
            await pageObj.waitForTimeout(5000);
}
const allItems = await pageObj.$$('.zg-grid-general-faceout');
for (const [index, item] of allItems.entries()) {
const obj = {
...pageData,
ID: index,
};
obj.name = await item.$eval('div > a.a-link-normal:nth-of-type(2) > span > div', el => el.innerHTML);
const priceExists = (await item.$('.a-color-price')) || null;
obj.price = priceExists ? await item.$eval('.a-color-price', el => el.innerText) : null;
obj.url = await item.$eval('div > a.a-link-normal:nth-of-type(1)', url => url.href);
obj.thumbnail = await item.$eval('div[class*="_p13n-zg-list-grid-desktop_maskStyle"] > img', url => url.src);
resultsArr.push(obj);
}
return;
}
const itemsObj = await pageObj.$$eval('div.p13n-sc-truncated', prods => prods.map(prod => prod.innerHTML));
const pricesObj = await pageObj.$$eval('span.p13n-sc-price', price => price.map(el => el.innerHTML));
const urlsObj = await pageObj.$$eval('span.aok-inline-block > a.a-link-normal', link => link.map(url => url.href));
const imgsObj = await pageObj.$$eval('a.a-link-normal > span > div.a-section > img', link => link.map(url => url.src));
// Scrape all items that match the selector
// Get rid of duplicate URLs (couldn't avoid scraping them)
const urlsArr = [];
for (const link of urlsObj) {
if (!urlsArr.includes(link)) {
urlsArr.push(link);
}
}
// Add scraped items to results array
for (let i = 0; i < Object.keys(itemsObj).length; i++) {
resultsArr.push({
...pageData,
ID: resultsArr.length,
name: itemsObj[i],
price: pricesObj[i],
url: urlsArr[i],
thumbnail: imgsObj[i],
});
}
}
async function scrapeDetailsPage(pageObj, pageData, label) {
const resultsArr = [];
// Scrape page 1
await getItems(pageObj, pageData, resultsArr, label);
// Go to page 2 and scrape
let nextPage;
try {
nextPage = await pageObj.waitForSelector('li.a-last > a');
} catch (e) {
log.error(`Could not extract second page - only one page returned. ${e}`);
}
if (nextPage) {
await nextPage.click();
await pageObj.waitForNavigation();
await getItems(pageObj, pageData, resultsArr, label);
await Apify.pushData(resultsArr);
log.info(`Saving results from ${await pageObj.title()}`);
}
}
module.exports = { scrapeDetailsPage };
app.js | import { VideoPlayer } from "./video-player.js";
import { registerGamepadEvents, registerKeyboardEvents, registerMouseEvents, sendClickEvent } from "./register-events.js";
let playButton;
let videoPlayer;
showPlayButton();
window.document.oncontextmenu = function () {
return false; // cancel default menu
}
window.addEventListener('resize', function() {
videoPlayer.resizeVideo();
}, true);
function showPlayButton() {
if (!document.getElementById('playButton')) {
let elementPlayButton = document.createElement('img');
elementPlayButton.id = 'playButton';
elementPlayButton.src = './public/images/Play.png';
elementPlayButton.alt = 'Start Streaming';
playButton = document.getElementById('player').appendChild(elementPlayButton);
playButton.addEventListener('click', onClickPlayButton);
}
}
function onClickPlayButton() {
playButton.style.display = 'none';
const playerDiv = document.getElementById('player');
playerDiv.classList = 'h-full rounded-lg player active';
// add video player
const elementVideo = document.createElement('video');
elementVideo.id = 'Video';
elementVideo.style.touchAction = 'none';
playerDiv.appendChild(elementVideo);
// add video thumbnail
const elementVideoThumb = document.createElement('video');
elementVideoThumb.id = 'VideoThumbnail';
elementVideoThumb.style.touchAction = 'none';
playerDiv.appendChild(elementVideoThumb);
setupVideoPlayer([elementVideo, elementVideoThumb]).then(value => videoPlayer = value);
const buttonsContainer = document.createElement('div');
buttonsContainer.id = 'buttonsContainer';
playerDiv.appendChild(buttonsContainer);
const buttons = document.querySelector('#buttonsContainer');
// add blue button
const elementBlueButton = document.createElement('button');
elementBlueButton.id = "blueButton";
elementBlueButton.innerHTML = "Light on";
buttons.appendChild(elementBlueButton);
elementBlueButton.addEventListener ("click", function() {
sendClickEvent(videoPlayer, 1);
});
// add green button
const elementGreenButton = document.createElement('button');
elementGreenButton.id = "greenButton";
elementGreenButton.innerHTML = "Light off";
buttons.appendChild(elementGreenButton);
elementGreenButton.addEventListener ("click", function() {
sendClickEvent(videoPlayer, 2);
});
// add orange button
const elementOrangeButton = document.createElement('button');
elementOrangeButton.id = "orangeButton";
elementOrangeButton.innerHTML = "Play audio";
buttons.appendChild(elementOrangeButton);
elementOrangeButton.addEventListener ("click", function() {
sendClickEvent(videoPlayer, 3);
});
// add fullscreen button
const elementFullscreenButton = document.createElement('img');
elementFullscreenButton.id = 'fullscreenButton';
elementFullscreenButton.src = './public/images/FullScreen.png';
playerDiv.appendChild(elementFullscreenButton);
elementFullscreenButton.addEventListener ("click", function() {
if (!document.fullscreenElement) {
if(document.documentElement.requestFullscreen) {
document.documentElement.requestFullscreen();
}
else if(document.documentElement.webkitRequestFullscreen){
document.documentElement.webkitRequestFullscreen(Element.ALLOW_KEYBOARD_INPUT);
}
}
});
document.addEventListener('webkitfullscreenchange', onFullscreenChange);
document.addEventListener('fullscreenchange', onFullscreenChange);
function onFullscreenChange(e) {
if(document.webkitFullscreenElement || document.fullscreenElement) {
elementFullscreenButton.style.display = 'none';
}
else {
elementFullscreenButton.style.display = 'block';
}
}
}
async function setupVideoPlayer(elements, config) {
const videoPlayer = new VideoPlayer(elements, config);
await videoPlayer.setupConnection();
videoPlayer.ondisconnect = onDisconnect;
registerGamepadEvents(videoPlayer);
registerKeyboardEvents(videoPlayer);
registerMouseEvents(videoPlayer, elements[0]);
return videoPlayer;
}
function onDisconnect() {
const playerDiv = document.getElementById('player')
clearChildren(playerDiv);
videoPlayer = null;
showPlayButton();
}
function clearChildren(element) {
while (element.firstChild) {
element.removeChild(element.firstChild);
}
}
stat.go | package model
import (
	"time"
	"gorm.io/gorm"
)
type Stat struct {
	gorm.Model
	Time time.Time
	StreamID uint
	Viewers int
}
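A minimal persistence sketch for this model, assuming GORM v2 with the sqlite driver (the driver choice and the module import path are assumptions, not taken from this file):
package main
import (
	"time"
	"gorm.io/driver/sqlite"
	"gorm.io/gorm"
	"example.com/app/model" // hypothetical import path for the package above
)
func main() {
	db, err := gorm.Open(sqlite.Open("stats.db"), &gorm.Config{})
	if err != nil {
		panic(err)
	}
	// AutoMigrate creates the stats table from the struct definition.
	if err := db.AutoMigrate(&model.Stat{}); err != nil {
		panic(err)
	}
	// Record one viewer-count sample for stream 1.
	db.Create(&model.Stat{Time: time.Now(), StreamID: 1, Viewers: 42})
}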
|
master.item.service.ts | import { Injectable } from '@nestjs/common';
import { InjectRepository } from '@nestjs/typeorm';
import { MasterItem } from '../model/inventory.master.item';
import { MasterItemDTO } from '../dto/inventory.master.item.dto';
import { Repository } from 'typeorm';
@Injectable()
export class MasterItemService {
constructor(
@InjectRepository(MasterItem)
private readonly repo: Repository<MasterItem>,
) {}
public async getAll(): Promise<MasterItemDTO[]> {
return await this.repo
.find()
.then((items) => items.map((e) => MasterItemDTO.createEntity(e)));
}
}
rising_hot_sphere.py | #!/usr/bin/env python
# encoding: utf-8
"""
Test problem demonstrating 3D hot-sphere rising in an stabilized atmosphere
in Cartesian coordinates
This problem evolves the 3D Euler equations using an F-wave
method, with gravitational source term modifications.
The primary variables are:
density (rho), x,y, and z momentum (rho*u,rho*v,rho*w), and energy.
"""
import numpy as np
from mappedGrid import euler3d_mappedgrid as mg
try:
from mpi4py import MPI
mpiAvailable = True
except ImportError:
    print('Warning: mpi4py is not available; proceeding without MPI')
    mpiAvailable = False
if mpiAvailable:
mpiRank = MPI.COMM_WORLD.Get_rank()
mpiSize = MPI.COMM_WORLD.Get_size()
else:
mpiRank = 0
mpiSize = 1
# Constants
gamma = 1.4 # Ratio of specific heats
gamma1 = gamma - 1.
gR = 980.665 # Acceleration due to gravity [cm/s**2]
kBoltzmann = 1.3807e-16 # Boltzmann constant [erg/K]
nAvogadro = 6.0221e23 # Avogadro's number [1/mol]
# Hot Sphere Parameters
xSphere = 1000.e5; ySphere = 1000.e5; zSphere = 150.e5
rSphere = 40.e5 # Radius of Sphere [cm]
TSphere = 2.7e4 # Temperature of Sphere Perturbation
# Grid Parameters
mxyz = [80,80,80] # Number of Grid Cells
xyzMin = [500.e5 , 500.e5 , 80.0e5 ] # Domain limits (min) [cm]
xyzMax = [1500.0e5, 1500.0e5, 950.0e5] # Domain limits (max) [cm]
mapType = "ZeroToOne"
z0 = xyzMin[2]
zN = xyzMax[2]
# Gravity Terms
gravityTerm = True # Turn Gravity Term On or Off in Riemann Solver
gravityEflux = False # Turn Gravity Term in Energy Flux On/Off
gFlux = 0
if gravityEflux: gFlux = 1
#-----------------------------------------------------------------------
# Name: outputDensity(state)
#
# Description:
# Using derived quantities function, output 2D slice of density
# for use in regression testing.
#-----------------------------------------------------------------------
def outputDensity(state):
state.p[0,:,:,:] = state.q[0,:,:,:]
nx = np.size(state.q,1)
    x = np.reshape(state.q[0,nx//2,:,:],np.size(state.q[0,nx//2,:,:]),order='F')
np.savetxt('verify_rising_hot_sphere_classic_1.txt',x,fmt='%18.8e',delimiter=' ')
#-----------------------------------------------------------------------
# Description:
# Equilibrium atmosphere
#
# Inputs:
# p0[mz+2*mbc] : pressure (1D array)
# rho0[mz+2*mbc] : density (1D array)
# Mavg[mz+2*mbc] : average molecular mass (1D array)
#
# Input/Outputs:
# p0,rho0,Mavg : 1D z-column initialization of p0 and rho0
#-----------------------------------------------------------------------
def setEquilibriumAtmosphere(p0,rho0,Mavg):
    p0 = [1.28255457e+02,2.45768842e+01,4.14947876e+00,6.29750420e-01,1.01220380e-01,2.64133921e-02,1.22941741e-02,7.08667395e-03,4.52931611e-03,3.07286214e-03,2.16905463e-03,1.57652477e-03,1.17092484e-03,8.84611067e-04,6.77691403e-04,5.25138237e-04,4.10841768e-04,3.24102394e-04,2.57470120e-04,2.05925021e-04,1.65598592e-04,1.33701518e-04,1.08364754e-04,8.82441931e-05,7.21143717e-05,5.91376054e-05,4.86178229e-05,4.00787900e-05,3.30908693e-05,2.73888126e-05,2.27031016e-05,1.88518481e-05,1.56898948e-05,1.30700401e-05,1.08991559e-05,9.09869161e-06,7.60521743e-06,6.36376491e-06,5.32972657e-06,4.46856235e-06,3.74878325e-06,3.14890785e-06,2.64613146e-06,2.22646032e-06,1.87396531e-06,1.57844875e-06,1.33028392e-06,1.12211091e-06,9.47071388e-07,7.99762122e-07,6.75921511e-07,5.71493939e-07,4.83610358e-07,4.09325094e-07,3.46744110e-07,2.93793938e-07,2.49152408e-07,2.11367113e-07,1.79432411e-07,1.52415843e-07,1.29549499e-07,1.10136422e-07,9.37086690e-08,7.97324669e-08,6.79127210e-08,5.78532722e-08,4.93172661e-08,4.20604343e-08,3.58836884e-08,3.06389102e-08,2.61608771e-08,2.23557534e-08,1.91042726e-08,1.63479490e-08,1.39976779e-08,1.19853352e-08,1.02623231e-08,8.78713846e-09,7.53940212e-09,6.46885245e-09,5.55032464e-09,4.76222864e-09,4.09020086e-09,3.51658796e-09]
    rho0 = [1.93347036e-07,4.03984315e-08,7.33795328e-09,1.16964004e-09,1.64049100e-10,2.53990286e-11,7.54287116e-12,3.40478277e-12,1.84556481e-12,1.10964372e-12,7.13581470e-13,4.81506393e-13,3.36472592e-13,2.41540079e-13,1.77156053e-13,1.32213794e-13,1.00089557e-13,7.67024111e-14,5.93930647e-14,4.64294817e-14,3.65782332e-14,2.90138753e-14,2.31378048e-14,1.85800114e-14,1.49929512e-14,1.21526733e-14,9.89015561e-15,8.07840567e-15,6.61976992e-15,5.43890503e-15,4.48202167e-15,3.70250573e-15,3.06590093e-15,2.54266886e-15,2.11283102e-15,1.75827860e-15,1.46560471e-15,1.22337830e-15,1.02239821e-15,8.55585508e-16,7.16578299e-16,6.01033981e-16,5.04419184e-16,4.23940996e-16,3.56468062e-16,2.99992883e-16,2.52633808e-16,2.12955966e-16,1.79630105e-16,1.51610996e-16,1.28075790e-16,1.08244792e-16,9.15665290e-17,7.74771188e-17,6.56137471e-17,5.55805979e-17,4.71251502e-17,3.99708405e-17,3.39261636e-17,2.88137888e-17,2.44878021e-17,2.08159094e-17,1.77092661e-17,1.50666724e-17,1.28321441e-17,1.09306468e-17,9.31730480e-18,7.94587120e-18,6.77866202e-18,5.78764327e-18,4.94156316e-18,4.22266806e-18,3.60840539e-18,3.08771188e-18,2.64374425e-18,2.26362608e-18,1.93817162e-18,1.65953699e-18,1.42386938e-18,1.22167290e-18,1.04819271e-18,8.99349679e-19,7.72429901e-19,6.64098458e-19]
    Mavg = [28.85614554,28.85337155,28.83817654,28.56226512,27.60224909,26.26692289,25.23573593,24.45469565,23.79308533,23.18781005,22.61490394,22.07318988,21.55703223,21.06778441,20.60540309,20.17202267,19.76585711,19.38847601,19.0408475, 18.71970337,18.42758099,18.16274099,17.92359740,17.70606183,17.51035814,17.33530373,17.17893585,17.03979933,16.91620578,16.80712079,16.71028376,16.62471452,16.54940299,16.48292773,16.42454596,16.37307369,16.32776306,16.28801338,16.2531155, 16.22247335,16.19551611,16.17188138,16.15108306,16.13288090,16.11686426,16.10282002,16.09046507,16.07960946,16.07007411,16.06169374,16.05433222,16.04784993,16.04215209,16.03712679,16.0327204,16.02883120,16.02540929,16.02239140,16.01973516,16.01738918,16.01531699,16.01348647,16.01187781,16.01045286,16.00919766,16.00808580,16.00710454,16.00623687,16.00546792,16.00478755,16.00418349,16.00365220,16.00317996,16.00276269,16.00239247,16.00206303,16.00176987,16.00150902,16.00127962,16.00107519,16.00089299,16.00073063,16.00058692,16.00045964]
    return p0,rho0,Mavg
#-----------------------------------------------------------------------
# Description:
# Modify pressure to create numeric atmosphere equilibrium
#
# Inputs:
# ze0[mz+2*mbc+1] : cell edge grid values
# p0[mz+2*mbc] : pressure
# rho0[mz+2*mbc] : density
#
# Input/Outputs:
# p0,rho0 : 1D z-column modification of p0 and rho0
#-----------------------------------------------------------------------
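# In discrete form, the loop below enforces at each interior cell edge
#   p0[iz-1] - p0[iz] = rho_b*gR*(dz[iz]+dz[iz-1])/2
# with the edge density rho_b linearly interpolated from the neighboring
# cells, so the discrete pressure gradient balances the gravity source term.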
def modifyEquilibriumAtmosphere(zep0,p0,rho0):
# Compute the delta-z (dz)
nz = np.size(zep0)-1
dz = np.zeros([nz],dtype='float',order='F')
for iz in range(nz-1):
dz[iz] = zep0[iz+1]-zep0[iz]
# Compute modified pressure at cell centers
iz = nz-1
dz2 = (dz[iz]+dz[iz-1])*0.5
p0[iz] = p0[iz] + rho0[iz]*gR*dz2
for iz in range(nz-1,0,-1):
dz2 = (dz[iz]+dz[iz-1])*0.5
finterp = dz[iz-1]/(dz[iz]+dz[iz-1])
rho_b = rho0[iz]*finterp + rho0[iz-1]*(1.-finterp)
p0[iz-1] = p0[iz] + rho_b*gR*dz2
return p0
#-----------------------------------------------------------------------
# Description:
# Custom BCs for the z-direction
#-----------------------------------------------------------------------
def customBCLowerZ(state,dim,t,qbc,auxbc,mbc):
for k in range(mbc):
qbc[0,:,:,k] = rho0[k]
qbc[1,:,:,k] = 0.
qbc[2,:,:,k] = 0.
qbc[3,:,:,k] = 0.
qbc[4,:,:,k] = p0[k]/gamma1 + qbc[0,:,:,k]*gR*zcpZ[k]*gFlux
def customBCUpperZ(state,dim,t,qbc,auxbc,mbc):
for k in range(mbc):
qbc[0,:,:,-k-1] = rho0[-k-1]
qbc[1,:,:,-k-1] = qbc[1,:,:,-mbc-1]
qbc[2,:,:,-k-1] = qbc[2,:,:,-mbc-1]
qbc[3,:,:,-k-1] = qbc[3,:,:,-mbc-1]
rhov2 = (qbc[1,:,:,-k-1]**2 + qbc[2,:,:,-k-1]**2 + qbc[3,:,:,-k-1]**2)/qbc[0,:,:,-k-1]
qbc[4,:,:,-k-1] = p0[-k-1]/gamma1 + 0.5*rhov2 + qbc[0,:,:,-k-1]*gR*zcpZ[-k-1]*gFlux
def customAuxBCLowerZ(state,dim,t,qbc,auxbc,mbc):
auxbc[:,:,:,:mbc] = auxtmp[:,:,:,:mbc]
def customAuxBCUpperZ(state,dim,t,qbc,auxbc,mbc):
auxbc[:,:,:,-mbc:] = auxtmp[:,:,:,-mbc:]
#-----------------------------------------------------------------------
# Main script for solving 3D Euler equations using Clawpack/PyClaw.
#-----------------------------------------------------------------------
def euler3d(kernel_language='Fortran',solver_type='classic',\
use_petsc=False,outdir='./_output',\
output_format='hdf5',file_prefix='equil',disable_output=False,\
mx=mxyz[0],my=mxyz[1],mz=mxyz[2],\
tfinal=64.0,num_output_times=1):
if use_petsc:
import clawpack.petclaw as pyclaw
else:
from clawpack import pyclaw
if solver_type=='classic':
solver = pyclaw.ClawSolver3D()
solver.dimensional_split = True
solver.limiters = pyclaw.limiters.tvd.minmod
solver.num_ghost = 2
solver.order = 2
solver.fwave = True
elif solver_type=='sharpclaw':
solver = pyclaw.SharpClawSolver3D()
else:
raise Exception('Unrecognized solver_type.')
import logging
solver.logger.setLevel(logging.DEBUG)
import euler_3d_gmap
solver.rp = euler_3d_gmap
solver.num_eqn = 5
solver.num_waves = 3
solver.cfl_max = 0.6
solver.cfl_desired = 0.5
solver.dt_initial = 1.e-0
solver.max_steps = 10000
# Initialize Domain
x = pyclaw.Dimension(0.0,1.0,mx,name='x')
y = pyclaw.Dimension(0.0,1.0,my,name='y')
z = pyclaw.Dimension(0.0,1.0,mz,name='z')
domain = pyclaw.Domain([x,y,z])
num_aux = 15
state = pyclaw.State(domain,solver.num_eqn,num_aux)
state.problem_data['gamma']=gamma
state.problem_data['g_r'] = gR
state.problem_data['gravity'] = gravityTerm
state.problem_data['gravityflux'] = gravityEflux
# Grids
mbc = solver.num_ghost
grid = state.grid
# Computational Grid Sizes
dxc = domain.grid.delta[0]
dyc = domain.grid.delta[1]
dzc = domain.grid.delta[2]
pmx, pmy, pmz = grid.num_cells[0], grid.num_cells[1], grid.num_cells[2]
# Computational Grid Centers and Edges
centers = grid.c_centers # centers (Comp.)
centersBC = grid.c_centers_with_ghost(mbc) # centers w Ghost (Comp.)
edgesBC = grid.c_edges_with_ghost(mbc) # edges w Ghost (Comp.)
# Grid Centers Without Boundary Cells (1D Slice) - Comp. and Phys.
xcc = grid.x.centers # x centers (Comp.)
ycc = grid.y.centers # y centers (Comp.)
zcc = grid.z.centers # z centers (Comp.)
xcp,ycp,zcp = mg.mapc2pwrapper(xcc,ycc,zcc,pmz,xyzMin,xyzMax,mapType)
# Grid Centers Without Boundary Cells (3D Arrays)
Xcc,Ycc,Zcc = centers[0][:][:][:],centers[1][:][:][:],centers[2][:][:][:]
Xcp,Ycp,Zcp = mg.mapc2pwrapper(Xcc,Ycc,Zcc,pmz,xyzMin,xyzMax,mapType)
Xcp = np.reshape(Xcp,[pmx,pmy,pmz],order='F') # x centers (Phys.)
Ycp = np.reshape(Ycp,[pmx,pmy,pmz],order='F') # y centers (Phys.)
Zcp = np.reshape(Zcp,[pmx,pmy,pmz],order='F') # z centers (Phys.)
# Grid Edges With Boundary Cells (1D Slice along z)- Comp. and Phys.
xecZ = edgesBC[0][0][0][:] # x edges along z (Comp.)
yecZ = edgesBC[1][0][0][:] # y edges along z (Comp.)
zecZ = edgesBC[2][0][0][:] # z edges along z (Comp.)
xepZ,yepZ,zepZ = mg.mapc2pwrapper(xecZ,yecZ,zecZ,pmz,xyzMin,xyzMax,mapType)
# Grid Centers With Boundary Cells (1D Slice along z) - Comp. and Phys.
global zcpZ
xccZ = centersBC[0][0][0][:] # x centers along z (Comp.)
yccZ = centersBC[1][0][0][:] # y centers along z (Comp.)
zccZ = centersBC[2][0][0][:] # z centers along z (Comp.)
xcpZ,ycpZ,zcpZ = mg.mapc2pwrapper(xccZ,yccZ,zccZ,pmz,xyzMin,xyzMax,mapType)
if np.sqrt(xepZ[0]**2+yepZ[0]**2+zepZ[0]**2) <= 0:
print "WARNING: z may go below Earth's surface"," zepZ: ",zepZ[0:10]
# Create vectors for 1D pressure and density column with boundary cells
mz0 = pmz+2*mbc
global p0, rho0, Mavg
p0 = np.zeros([mz0],dtype='float',order='F')
rho0 = np.zeros([mz0],dtype='float',order='F')
Mavg = np.zeros([mz0],dtype='float',order='F')
# Set the equilibrium pressure such that dp/dz = -rho*gR
p0,rho0,Mavg = setEquilibriumAtmosphere(p0,rho0,Mavg)
# Modify the equilibrium such that dp/dz = -rho*gR is held numerically
p0 = modifyEquilibriumAtmosphere(zepZ,p0,rho0)
# Set the auxiliary variables
xlower,ylower,zlower = edgesBC[0][0][0][0],edgesBC[1][0][0][0],edgesBC[2][0][0][0]
dxc,dyc,dzc = domain.grid.delta[0],domain.grid.delta[1],domain.grid.delta[2]
global auxtmp
auxtmp = np.zeros([num_aux,pmx+2*mbc,pmy+2*mbc,pmz+2*mbc],dtype='float',order='F')
auxtmp = mg.setauxiliaryvariables(num_aux,mbc,pmx,pmy,pmz,xlower,ylower,zlower,dxc,dyc,dzc,xyzMin,xyzMax,mapType)
state.aux[:,:,:,:] = auxtmp[:,mbc:-mbc,mbc:-mbc,mbc:-mbc]
# Set Index for Capacity Function in state.aux (Python 0-based)
state.index_capa = 12
# Set the state variables (Initial Conditions)
# Initialize p,T,velSqrd
p = np.zeros([pmx,pmy,pmz],dtype='float',order='F')
T = np.zeros([pmx,pmy,pmz],dtype='float',order='F')
velSqrd = np.zeros([pmx,pmy,pmz],dtype='float',order='F')
# Density
for i in range(pmx):
for j in range(pmy):
# NEEDS TO BE FIXED WHEN MPI SLICES NORMAL TO Z
state.q[0,i,j,:] = rho0[mbc:pmz+mbc]
# Momentum
state.q[1,:,:,:] = 0. # x-momentum (rho*u)
state.q[2,:,:,:] = 0. # y-momentum (rho*v)
state.q[3,:,:,:] = 0. # z-momentum (rho*w)
# Velocity Squared (u**2+v**2+w**2)
velSqrd[:,:,:] = (state.q[1,:,:,:]**2+state.q[2,:,:,:]**2 + state.q[3,:,:,:]**2)/state.q[0,:,:,:]**2
# Energy
for i in range(pmx):
for j in range(pmy):
# NEEDS TO BE FIXED WHEN MPI SLICES NORMAL TO Z
p[i,j,:] = p0[mbc:pmz+mbc]
state.q[4,:,:,:] = p/gamma1 + 0.5*state.q[0,:,:,:]*velSqrd + state.q[0,:,:,:]*(gR)*Zcp[:,:,:]*gFlux
# Add Temperature Perturbation
T = p/state.q[0,:,:,:]
L = np.sqrt((Xcp-xSphere)**2+(Ycp-ySphere)**2+(Zcp-zSphere)**2)
for i in range(pmx):
for j in range(pmy):
for k in range(pmz):
if L[i,j,k] <= rSphere:
mu = Mavg[k+mbc]/nAvogadro
T[i,j,k] += TSphere*(kBoltzmann/mu)*(1.0-L[i,j,k]/rSphere)
p[i,j,k] = T[i,j,k]*state.q[0,i,j,k]
state.q[4,:,:,:] = p/gamma1 + 0.5*state.q[0,:,:,:]*velSqrd + state.q[0,:,:,:]*(gR)*Zcp[:,:,:]*gFlux # energy (e)
# Setup Boundary Conditions
# X - Boundary Conditions
solver.bc_lower[0] = pyclaw.BC.extrap
solver.bc_upper[0] = pyclaw.BC.extrap
# Y - Boundary Conditions
solver.bc_lower[1] = pyclaw.BC.extrap
solver.bc_upper[1] = pyclaw.BC.extrap
# Z - Boundary Conditions
solver.bc_lower[2] = pyclaw.BC.custom
solver.bc_upper[2] = pyclaw.BC.custom
solver.user_bc_lower = customBCLowerZ
solver.user_bc_upper = customBCUpperZ
# Aux - Boundary Conditions
solver.aux_bc_lower[0] = pyclaw.BC.extrap
solver.aux_bc_upper[0] = pyclaw.BC.extrap
solver.aux_bc_lower[1] = pyclaw.BC.extrap
solver.aux_bc_upper[1] = pyclaw.BC.extrap
solver.aux_bc_lower[2] = pyclaw.BC.custom
solver.aux_bc_upper[2] = pyclaw.BC.custom
solver.user_aux_bc_lower = customAuxBCLowerZ
solver.user_aux_bc_upper = customAuxBCUpperZ
# Solver Parameters
claw = pyclaw.Controller()
claw.verbosity = 4
claw.solution = pyclaw.Solution(state,domain)
claw.solver = solver
claw.output_format = output_format
claw.output_file_prefix = file_prefix
claw.keep_copy = False
if disable_output:
claw.output_format = None
claw.tfinal = tfinal
claw.num_output_times = num_output_times
claw.outdir = outdir
#state.mp = 1
#claw.compute_p = outputDensity
return claw
# __main__()
if __name__=="__main__":
from clawpack.pyclaw.util import run_app_from_main
output = run_app_from_main(euler3d)
exportAsYaml.go | /*
Copyright © 2020 Veith Zäch <[email protected]>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
package cmd
import (
"github.com/eclipse/eclipsefuro/furo/internal/cmd/exportAsYaml"
"github.com/spf13/cobra"
)
// exportAsYamlCmd represents the exportAsYaml command
var exportAsYamlCmd = &cobra.Command{
Use: "exportAsYaml",
Short: "Exports all specs and the current config in one yaml file to stdout",
Long: `Use this for your chain of generators...
You will get a yaml with all types and services and the config.
Feel free to add custom sections in the config to use them in custom commands or scripts.
services:
your.Service: ...
types:
your.type: ...
config:
module: mod
custom:
remoteDir: "path/to/somewhere"
otherCustomSetting: true
`,
Run: exportAsYaml.Run,
}
func init() {
rootCmd.AddCommand(exportAsYamlCmd)
// Here you will define your flags and configuration settings.
// Cobra supports Persistent Flags which will work for this command
// and all subcommands, e.g.:
// exportAsYamlCmd.PersistentFlags().String("foo", "", "A help for foo")
// Cobra supports local flags which will only run when this command
// is called directly, e.g.:
// exportAsYamlCmd.Flags().BoolP("toggle", "t", false, "Help message for toggle")
exportAsYamlCmd.Flags().BoolP("full", "f", false, "Include the ast info")
}
| it() |
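// A minimal sketch, not the project's actual handler (exportAsYaml.Run lives
// in internal/cmd/exportAsYaml and is not shown here), illustrating how a
// cobra run function can read the --full flag registered above:
package cmd

import "github.com/spf13/cobra"

func runExportSketch(cmd *cobra.Command, args []string) error {
	// GetBool looks the flag up by the name passed to Flags().BoolP ("full").
	full, err := cmd.Flags().GetBool("full")
	if err != nil {
		return err
	}
	if full {
		// the export would include the ast info here
	}
	return nil
}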
operator.pb.go | // ------------------------------------------------------------
// Copyright (c) Microsoft Corporation and Dapr Contributors.
// Licensed under the MIT License.
// ------------------------------------------------------------
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.26.0
// protoc v3.14.0
// source: dapr/proto/operator/v1/operator.proto
package operator
import (
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoimpl "google.golang.org/protobuf/runtime/protoimpl"
emptypb "google.golang.org/protobuf/types/known/emptypb"
reflect "reflect"
sync "sync"
)
const (
// Verify that this generated code is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
// Verify that runtime/protoimpl is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
)
// ListComponentsRequest is the request to get components for a sidecar in namespace.
type ListComponentsRequest struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Namespace string `protobuf:"bytes,1,opt,name=namespace,proto3" json:"namespace,omitempty"`
}
func (x *ListComponentsRequest) Reset() {
*x = ListComponentsRequest{}
if protoimpl.UnsafeEnabled {
mi := &file_dapr_proto_operator_v1_operator_proto_msgTypes[0]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *ListComponentsRequest) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*ListComponentsRequest) ProtoMessage() {}
func (x *ListComponentsRequest) ProtoReflect() protoreflect.Message {
mi := &file_dapr_proto_operator_v1_operator_proto_msgTypes[0]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use ListComponentsRequest.ProtoReflect.Descriptor instead.
func (*ListComponentsRequest) Descriptor() ([]byte, []int) {
return file_dapr_proto_operator_v1_operator_proto_rawDescGZIP(), []int{0}
}
func (x *ListComponentsRequest) GetNamespace() string {
if x != nil {
return x.Namespace
}
return ""
}
// ComponentUpdateRequest is the request to get updates about new components for a given namespace.
type ComponentUpdateRequest struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Namespace string `protobuf:"bytes,1,opt,name=namespace,proto3" json:"namespace,omitempty"`
}
func (x *ComponentUpdateRequest) Reset() {
*x = ComponentUpdateRequest{}
if protoimpl.UnsafeEnabled {
mi := &file_dapr_proto_operator_v1_operator_proto_msgTypes[1]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *ComponentUpdateRequest) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*ComponentUpdateRequest) ProtoMessage() {}
func (x *ComponentUpdateRequest) ProtoReflect() protoreflect.Message {
mi := &file_dapr_proto_operator_v1_operator_proto_msgTypes[1]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use ComponentUpdateRequest.ProtoReflect.Descriptor instead.
func (*ComponentUpdateRequest) Descriptor() ([]byte, []int) {
return file_dapr_proto_operator_v1_operator_proto_rawDescGZIP(), []int{1}
}
func (x *ComponentUpdateRequest) GetNamespace() string {
if x != nil {
return x.Namespace
}
return ""
}
// ComponentUpdateEvent includes the updated component event.
type ComponentUpdateEvent struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Component []byte `protobuf:"bytes,1,opt,name=component,proto3" json:"component,omitempty"`
}
func (x *ComponentUpdateEvent) Reset() {
*x = ComponentUpdateEvent{}
if protoimpl.UnsafeEnabled {
mi := &file_dapr_proto_operator_v1_operator_proto_msgTypes[2]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *ComponentUpdateEvent) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*ComponentUpdateEvent) ProtoMessage() {}
func (x *ComponentUpdateEvent) ProtoReflect() protoreflect.Message {
mi := &file_dapr_proto_operator_v1_operator_proto_msgTypes[2]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use ComponentUpdateEvent.ProtoReflect.Descriptor instead.
func (*ComponentUpdateEvent) Descriptor() ([]byte, []int) {
return file_dapr_proto_operator_v1_operator_proto_rawDescGZIP(), []int{2}
}
func (x *ComponentUpdateEvent) GetComponent() []byte {
if x != nil {
return x.Component
}
return nil
}
// ListComponentResponse includes the list of available components.
type ListComponentResponse struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Components [][]byte `protobuf:"bytes,1,rep,name=components,proto3" json:"components,omitempty"`
}
func (x *ListComponentResponse) Reset() {
*x = ListComponentResponse{}
if protoimpl.UnsafeEnabled {
mi := &file_dapr_proto_operator_v1_operator_proto_msgTypes[3]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *ListComponentResponse) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*ListComponentResponse) ProtoMessage() {}
func (x *ListComponentResponse) ProtoReflect() protoreflect.Message {
mi := &file_dapr_proto_operator_v1_operator_proto_msgTypes[3]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use ListComponentResponse.ProtoReflect.Descriptor instead.
func (*ListComponentResponse) Descriptor() ([]byte, []int) {
return file_dapr_proto_operator_v1_operator_proto_rawDescGZIP(), []int{3}
}
func (x *ListComponentResponse) GetComponents() [][]byte {
if x != nil {
return x.Components
}
return nil
}
// GetConfigurationRequest is the request message to get the configuration.
type GetConfigurationRequest struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
Namespace string `protobuf:"bytes,2,opt,name=namespace,proto3" json:"namespace,omitempty"`
}
func (x *GetConfigurationRequest) Reset() {
*x = GetConfigurationRequest{}
if protoimpl.UnsafeEnabled {
mi := &file_dapr_proto_operator_v1_operator_proto_msgTypes[4]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *GetConfigurationRequest) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*GetConfigurationRequest) ProtoMessage() {}
func (x *GetConfigurationRequest) ProtoReflect() protoreflect.Message {
mi := &file_dapr_proto_operator_v1_operator_proto_msgTypes[4]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use GetConfigurationRequest.ProtoReflect.Descriptor instead.
func (*GetConfigurationRequest) Descriptor() ([]byte, []int) {
return file_dapr_proto_operator_v1_operator_proto_rawDescGZIP(), []int{4}
}
func (x *GetConfigurationRequest) GetName() string {
if x != nil {
return x.Name
}
return ""
}
func (x *GetConfigurationRequest) GetNamespace() string {
if x != nil {
return x.Namespace
}
return ""
}
// GetConfigurationResponse includes the requested configuration.
type GetConfigurationResponse struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Configuration []byte `protobuf:"bytes,1,opt,name=configuration,proto3" json:"configuration,omitempty"`
}
func (x *GetConfigurationResponse) Reset() {
*x = GetConfigurationResponse{}
if protoimpl.UnsafeEnabled {
mi := &file_dapr_proto_operator_v1_operator_proto_msgTypes[5]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *GetConfigurationResponse) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*GetConfigurationResponse) ProtoMessage() {}
func (x *GetConfigurationResponse) ProtoReflect() protoreflect.Message {
mi := &file_dapr_proto_operator_v1_operator_proto_msgTypes[5]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use GetConfigurationResponse.ProtoReflect.Descriptor instead.
func (*GetConfigurationResponse) Descriptor() ([]byte, []int) {
return file_dapr_proto_operator_v1_operator_proto_rawDescGZIP(), []int{5}
}
func (x *GetConfigurationResponse) GetConfiguration() []byte {
if x != nil {
return x.Configuration
}
return nil
}
// ListSubscriptionsResponse includes pub/sub subscriptions.
type ListSubscriptionsResponse struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Subscriptions [][]byte `protobuf:"bytes,1,rep,name=subscriptions,proto3" json:"subscriptions,omitempty"`
}
func (x *ListSubscriptionsResponse) Reset() {
*x = ListSubscriptionsResponse{}
if protoimpl.UnsafeEnabled {
mi := &file_dapr_proto_operator_v1_operator_proto_msgTypes[6]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *ListSubscriptionsResponse) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*ListSubscriptionsResponse) ProtoMessage() {}
func (x *ListSubscriptionsResponse) ProtoReflect() protoreflect.Message {
mi := &file_dapr_proto_operator_v1_operator_proto_msgTypes[6]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use ListSubscriptionsResponse.ProtoReflect.Descriptor instead.
func (*ListSubscriptionsResponse) Descriptor() ([]byte, []int) {
return file_dapr_proto_operator_v1_operator_proto_rawDescGZIP(), []int{6}
}
func (x *ListSubscriptionsResponse) GetSubscriptions() [][]byte {
if x != nil {
return x.Subscriptions
}
return nil
}
var File_dapr_proto_operator_v1_operator_proto protoreflect.FileDescriptor
var file_dapr_proto_operator_v1_operator_proto_rawDesc = []byte{
0x0a, 0x25, 0x64, 0x61, 0x70, 0x72, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x6f, 0x70, 0x65,
0x72, 0x61, 0x74, 0x6f, 0x72, 0x2f, 0x76, 0x31, 0x2f, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x6f,
0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x16, 0x64, 0x61, 0x70, 0x72, 0x2e, 0x70, 0x72,
0x6f, 0x74, 0x6f, 0x2e, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x2e, 0x76, 0x31, 0x1a,
0x1b, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66,
0x2f, 0x65, 0x6d, 0x70, 0x74, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x35, 0x0a, 0x15,
0x4c, 0x69, 0x73, 0x74, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x73, 0x52, 0x65,
0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61,
0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70,
0x61, 0x63, 0x65, 0x22, 0x36, 0x0a, 0x16, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74,
0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1c, 0x0a,
0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09,
0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x22, 0x34, 0x0a, 0x14, 0x43,
0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x45, 0x76,
0x65, 0x6e, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74,
0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x09, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e,
0x74, 0x22, 0x37, 0x0a, 0x15, 0x4c, 0x69, 0x73, 0x74, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65,
0x6e, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x1e, 0x0a, 0x0a, 0x63, 0x6f,
0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0c, 0x52, 0x0a,
0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x73, 0x22, 0x4b, 0x0a, 0x17, 0x47, 0x65,
0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65,
0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20,
0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d,
0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61,
0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x22, 0x40, 0x0a, 0x18, 0x47, 0x65, 0x74, 0x43, 0x6f,
0x6e, 0x66, 0x69, 0x67, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f,
0x6e, 0x73, 0x65, 0x12, 0x24, 0x0a, 0x0d, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72, 0x61,
0x74, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0d, 0x63, 0x6f, 0x6e, 0x66,
0x69, 0x67, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x41, 0x0a, 0x19, 0x4c, 0x69, 0x73,
0x74, 0x53, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x52, 0x65,
0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x24, 0x0a, 0x0d, 0x73, 0x75, 0x62, 0x73, 0x63, 0x72,
0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0c, 0x52, 0x0d, 0x73,
0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x32, 0xcc, 0x03, 0x0a,
0x08, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x12, 0x73, 0x0a, 0x0f, 0x43, 0x6f, 0x6d,
0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x12, 0x2e, 0x2e, 0x64,
0x61, 0x70, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74,
0x6f, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x55,
0x70, 0x64, 0x61, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2c, 0x2e, 0x64,
0x61, 0x70, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74,
0x6f, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x55,
0x70, 0x64, 0x61, 0x74, 0x65, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x22, 0x00, 0x30, 0x01, 0x12, 0x70,
0x0a, 0x0e, 0x4c, 0x69, 0x73, 0x74, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x73,
0x12, 0x2d, 0x2e, 0x64, 0x61, 0x70, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x6f, 0x70,
0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x43, 0x6f,
0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a,
0x2d, 0x2e, 0x64, 0x61, 0x70, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x6f, 0x70, 0x65,
0x72, 0x61, 0x74, 0x6f, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x43, 0x6f, 0x6d,
0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00,
0x12, 0x77, 0x0a, 0x10, 0x47, 0x65, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72, 0x61,
0x74, 0x69, 0x6f, 0x6e, 0x12, 0x2f, 0x2e, 0x64, 0x61, 0x70, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74,
0x6f, 0x2e, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x65,
0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65,
0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x30, 0x2e, 0x64, 0x61, 0x70, 0x72, 0x2e, 0x70, 0x72, 0x6f,
0x74, 0x6f, 0x2e, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x47,
0x65, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52,
0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x60, 0x0a, 0x11, 0x4c, 0x69, 0x73,
0x74, 0x53, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x16,
0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66,
0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x1a, 0x31, 0x2e, 0x64, 0x61, 0x70, 0x72, 0x2e, 0x70, 0x72,
0x6f, 0x74, 0x6f, 0x2e, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x2e, 0x76, 0x31, 0x2e,
0x4c, 0x69, 0x73, 0x74, 0x53, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e,
0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x42, 0x35, 0x5a, 0x33, 0x67,
0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x64, 0x61, 0x70, 0x72, 0x2f, 0x64,
0x61, 0x70, 0x72, 0x2f, 0x70, 0x6b, 0x67, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x6f, 0x70,
0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x2f, 0x76, 0x31, 0x3b, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74,
0x6f, 0x72, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
}
var (
file_dapr_proto_operator_v1_operator_proto_rawDescOnce sync.Once
file_dapr_proto_operator_v1_operator_proto_rawDescData = file_dapr_proto_operator_v1_operator_proto_rawDesc
)
func file_dapr_proto_operator_v1_operator_proto_rawDescGZIP() []byte {
file_dapr_proto_operator_v1_operator_proto_rawDescOnce.Do(func() {
file_dapr_proto_operator_v1_operator_proto_rawDescData = protoimpl.X.CompressGZIP(file_dapr_proto_operator_v1_operator_proto_rawDescData)
})
return file_dapr_proto_operator_v1_operator_proto_rawDescData
}
var file_dapr_proto_operator_v1_operator_proto_msgTypes = make([]protoimpl.MessageInfo, 7)
var file_dapr_proto_operator_v1_operator_proto_goTypes = []interface{}{
(*ListComponentsRequest)(nil), // 0: dapr.proto.operator.v1.ListComponentsRequest
(*ComponentUpdateRequest)(nil), // 1: dapr.proto.operator.v1.ComponentUpdateRequest
(*ComponentUpdateEvent)(nil), // 2: dapr.proto.operator.v1.ComponentUpdateEvent
(*ListComponentResponse)(nil), // 3: dapr.proto.operator.v1.ListComponentResponse
(*GetConfigurationRequest)(nil), // 4: dapr.proto.operator.v1.GetConfigurationRequest
(*GetConfigurationResponse)(nil), // 5: dapr.proto.operator.v1.GetConfigurationResponse
(*ListSubscriptionsResponse)(nil), // 6: dapr.proto.operator.v1.ListSubscriptionsResponse
(*emptypb.Empty)(nil), // 7: google.protobuf.Empty
}
var file_dapr_proto_operator_v1_operator_proto_depIdxs = []int32{
1, // 0: dapr.proto.operator.v1.Operator.ComponentUpdate:input_type -> dapr.proto.operator.v1.ComponentUpdateRequest
0, // 1: dapr.proto.operator.v1.Operator.ListComponents:input_type -> dapr.proto.operator.v1.ListComponentsRequest
4, // 2: dapr.proto.operator.v1.Operator.GetConfiguration:input_type -> dapr.proto.operator.v1.GetConfigurationRequest
7, // 3: dapr.proto.operator.v1.Operator.ListSubscriptions:input_type -> google.protobuf.Empty
2, // 4: dapr.proto.operator.v1.Operator.ComponentUpdate:output_type -> dapr.proto.operator.v1.ComponentUpdateEvent
3, // 5: dapr.proto.operator.v1.Operator.ListComponents:output_type -> dapr.proto.operator.v1.ListComponentResponse
5, // 6: dapr.proto.operator.v1.Operator.GetConfiguration:output_type -> dapr.proto.operator.v1.GetConfigurationResponse
6, // 7: dapr.proto.operator.v1.Operator.ListSubscriptions:output_type -> dapr.proto.operator.v1.ListSubscriptionsResponse
4, // [4:8] is the sub-list for method output_type
0, // [0:4] is the sub-list for method input_type
0, // [0:0] is the sub-list for extension type_name
0, // [0:0] is the sub-list for extension extendee
0, // [0:0] is the sub-list for field type_name
}
func init() { file_dapr_proto_operator_v1_operator_proto_init() }
func file_dapr_proto_operator_v1_operator_proto_init() | {
if File_dapr_proto_operator_v1_operator_proto != nil {
return
}
if !protoimpl.UnsafeEnabled {
file_dapr_proto_operator_v1_operator_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*ListComponentsRequest); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_dapr_proto_operator_v1_operator_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*ComponentUpdateRequest); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_dapr_proto_operator_v1_operator_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*ComponentUpdateEvent); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_dapr_proto_operator_v1_operator_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*ListComponentResponse); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_dapr_proto_operator_v1_operator_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*GetConfigurationRequest); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_dapr_proto_operator_v1_operator_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*GetConfigurationResponse); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_dapr_proto_operator_v1_operator_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*ListSubscriptionsResponse); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
}
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_dapr_proto_operator_v1_operator_proto_rawDesc,
NumEnums: 0,
NumMessages: 7,
NumExtensions: 0,
NumServices: 1,
},
GoTypes: file_dapr_proto_operator_v1_operator_proto_goTypes,
DependencyIndexes: file_dapr_proto_operator_v1_operator_proto_depIdxs,
MessageInfos: file_dapr_proto_operator_v1_operator_proto_msgTypes,
}.Build()
File_dapr_proto_operator_v1_operator_proto = out.File
file_dapr_proto_operator_v1_operator_proto_rawDesc = nil
file_dapr_proto_operator_v1_operator_proto_goTypes = nil
file_dapr_proto_operator_v1_operator_proto_depIdxs = nil
} |
|
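// A short usage sketch for the generated messages above, assuming this
// package is imported under its go_package path; the gRPC client stubs for
// the Operator service live in a separate *_grpc.pb.go file and are not
// shown here:
package main

import (
	"fmt"

	operator "github.com/dapr/dapr/pkg/proto/operator/v1"
)

func main() {
	// Build the request a sidecar would send to ListComponents.
	req := &operator.ListComponentsRequest{Namespace: "default"}
	// Generated getters are nil-safe and return the zero value on a nil receiver.
	fmt.Println(req.GetNamespace())
}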
test_odata_query.py | import unittest
from msgraph_async.common.odata_query import *
class TestODataQuery(unittest.TestCase):
def setUp(self):
pass
@classmethod
def setUpClass(cls):
pass
def get_instance(self):
return ODataQuery()
def test_empty_odata(self):
i = self.get_instance()
self.assertEqual("EMPTY OPEN DATA QUERY", str(i))
def test_count_set_bad_value(self):
i = self.get_instance()
try:
i.count = 1
self.fail()
except ValueError:
pass
def test_count_set(self):
i = self.get_instance()
i.count = True
def test_expand_set_bad_value(self):
i = self.get_instance()
try:
i.expand = 1
self.fail()
except ValueError:
pass
def test_expand_set(self):
i = self.get_instance()
i.expand = "groups"
| i = self.get_instance()
try:
i.filter = 1
self.fail()
except ValueError:
pass
def test_select_set_bad_value(self):
i = self.get_instance()
try:
i.select = 1
self.fail()
except ValueError:
pass
def test_select_set_bad_value2(self):
i = self.get_instance()
try:
i.select = ["valid", 10]
self.fail()
except ValueError:
pass
def test_select_set(self):
i = self.get_instance()
i.select = ["firstName", "lastName"]
def test_top_set_bad_value(self):
i = self.get_instance()
try:
i.top = "10"
self.fail()
except ValueError:
pass
def test_top_set(self):
i = self.get_instance()
i.top = 10
def test_top_query(self):
i = self.get_instance()
i.top = 10
self.assertEqual(str(i), "?$top=10")
def test_top_and_select_query(self):
i = self.get_instance()
i.top = 10
i.select = ["subject", "sender"]
self.assertEqual(str(i), "?$select=subject,sender&$top=10")
def test_filter_query(self):
i = self.get_instance()
constrain1 = Constrain("city", LogicalOperator.NE, "New-York")
constrain2 = Constrain("displayName", LogicalOperator.EQ, "Noam Meirovitch")
f = Filter([constrain1, constrain2], LogicalConnector.OR)
i.filter = f
self.assertEqual("?$filter=city ne New-York or displayName eq Noam Meirovitch", str(i))
def test_count_expand_filter_select_top_query(self):
i = self.get_instance()
constrain1 = Constrain("city", LogicalOperator.NE, "New-York")
constrain2 = Constrain("displayName", LogicalOperator.EQ, "Noam Meirovitch")
f = Filter([constrain1, constrain2], LogicalConnector.OR)
i.count = True
i.expand = "groups"
i.filter = f
i.top = 15
i.select = ["displayName", "firstName", "lastName"]
self.assertEqual("?$count=true&$expand=groups&$filter=city ne New-York or displayName eq Noam Meirovitch&$select=displayName,firstName,lastName&$top=15", str(i)) | def test_filter_set_bad_value(self): |
set-environment-variables.js | /*
Copyright 2019 Adobe. All rights reserved.
This file is licensed to you under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License. You may obtain a copy
of the License at http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software distributed under
the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS
OF ANY KIND, either express or implied. See the License for the specific language
governing permissions and limitations under the License.
*/
const BaseEnvironmentVariablesCommand = require('../../base-environment-variables-command')
const BaseVariablesCommand = require('../../base-variables-command')
const { accessToken: getAccessToken } = require('@adobe/aio-cli-plugin-jwt-auth')
const { getApiKey, getBaseUrl, getOrgId, sanitizeEnvironmentId } = require('../../cloudmanager-helpers')
const { init } = require('@adobe/aio-lib-cloudmanager')
const commonFlags = require('../../common-flags')
| const accessToken = await getAccessToken(passphrase)
const orgId = await getOrgId()
const baseUrl = await getBaseUrl()
const sdk = await init(orgId, apiKey, accessToken, baseUrl)
return sdk.setEnvironmentVariables(programId, environmentId, variables)
}
class SetEnvironmentVariablesCommand extends BaseEnvironmentVariablesCommand {
async run () {
const { args, flags } = this.parse(SetEnvironmentVariablesCommand)
return this.runSet(args, flags)
}
async setVariables (programId, args, variables, passphrase = null) {
const environmentId = sanitizeEnvironmentId(args.environmentId)
return _setEnvironmentVariables(programId, environmentId, variables, passphrase)
}
}
SetEnvironmentVariablesCommand.description = 'sets variables on an environment. These are runtime variables available to components running inside the runtime environment. Use set-pipeline-variables to set build-time variables on a pipeline.'
SetEnvironmentVariablesCommand.args = [
{ name: 'environmentId', required: true, description: 'the environment id' }
]
SetEnvironmentVariablesCommand.flags = {
...commonFlags.global,
...commonFlags.programId,
...BaseVariablesCommand.flags
}
module.exports = SetEnvironmentVariablesCommand | async function _setEnvironmentVariables (programId, environmentId, variables, passphrase) {
const apiKey = await getApiKey() |
nodes.go | /*
Copyright 2018 Gravitational, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package kubernetes
import (
"context"
"encoding/json"
"fmt"
"github.com/gravitational/gravity/lib/defaults"
"github.com/gravitational/gravity/lib/storage"
"github.com/gravitational/gravity/lib/utils"
"github.com/cenkalti/backoff"
"github.com/gravitational/rigging"
"github.com/gravitational/trace"
log "github.com/sirupsen/logrus"
v1 "k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/types"
"k8s.io/apimachinery/pkg/util/strategicpatch"
"k8s.io/client-go/kubernetes"
corev1 "k8s.io/client-go/kubernetes/typed/core/v1"
)
// Drain safely drains the specified node, using the Eviction API if the API server supports it.
func Drain(ctx context.Context, client *kubernetes.Clientset, nodeName string) error |
// SetUnschedulable marks the specified node as unschedulable depending on the value of the specified flag.
// Retries the operation internally on update conflicts.
func SetUnschedulable(ctx context.Context, client corev1.NodeInterface, nodeName string, unschedulable bool) error {
node, err := client.Get(ctx, nodeName, metav1.GetOptions{})
if err != nil {
return rigging.ConvertError(err)
}
if node.Spec.Unschedulable == unschedulable {
log := log.WithField("node", nodeName)
if unschedulable {
log.Debug("already cordoned")
} else {
log.Debug("already uncordoned")
}
// No update
return nil
}
err = Retry(ctx, func() error {
return trace.Wrap(setUnschedulable(client, nodeName, unschedulable))
})
return rigging.ConvertError(err)
}
// UpdateTaints adds and/or removes taints specified with add/remove correspondingly on the specified node.
func UpdateTaints(ctx context.Context, client corev1.NodeInterface, nodeName string, taintsToAdd []v1.Taint, taintsToRemove []v1.Taint) error {
node, err := client.Get(ctx, nodeName, metav1.GetOptions{})
if err != nil {
return rigging.ConvertError(err)
}
newTaints := append([]v1.Taint{}, taintsToAdd...)
oldTaints := node.Spec.Taints
	// carry over existing taints that are not being updated into newTaints
added := addTaints(oldTaints, &newTaints)
deleted, err := deleteTaints(taintsToRemove, &newTaints)
if err != nil {
return trace.Wrap(err)
}
if !added && !deleted {
// No update
return nil
}
err = Retry(ctx, func() error {
return trace.Wrap(updateTaints(client, nodeName, newTaints))
})
return rigging.ConvertError(err)
}
// UpdateLabels adds labels on the node specified with nodeName
func UpdateLabels(ctx context.Context, client corev1.NodeInterface, nodeName string, labels map[string]string) error {
err := Retry(ctx, func() error {
return trace.Wrap(updateLabels(client, nodeName, labels))
})
return rigging.ConvertError(err)
}
// GetNode returns Kubernetes node corresponding to the provided server
func GetNode(client *kubernetes.Clientset, server storage.Server) (*v1.Node, error) {
nodes, err := client.CoreV1().Nodes().
List(context.TODO(), metav1.ListOptions{
LabelSelector: utils.MakeSelector(map[string]string{
v1.LabelHostname: server.KubeNodeID(),
}).String(),
})
if err != nil {
return nil, rigging.ConvertErrorWithContext(err,
"failed to list Kubernetes nodes")
}
if len(nodes.Items) == 0 {
return nil, trace.NotFound(
"could not find a Kubernetes node for %v", server).
AddField("label", fmt.Sprintf("%v=%v", v1.LabelHostname, server.KubeNodeID()))
}
if len(nodes.Items) > 1 {
return nil, trace.BadParameter(
"found more than 1 Kubernetes node for %v: %v", server, nodes.Items)
}
return &nodes.Items[0], nil
}
// setUnschedulable sets unschedulable status on the node given with nodeName
func setUnschedulable(client corev1.NodeInterface, nodeName string, unschedulable bool) error {
node, err := client.Get(context.TODO(), nodeName, metav1.GetOptions{})
if err != nil {
return rigging.ConvertError(err)
}
oldData, err := json.Marshal(node)
if err != nil {
return rigging.ConvertError(err)
}
node.Spec.Unschedulable = unschedulable
newData, err := json.Marshal(node)
if err != nil {
return rigging.ConvertError(err)
}
patchBytes, patchErr := strategicpatch.CreateTwoWayMergePatch(oldData, newData, node)
if patchErr == nil {
_, err = client.Patch(context.TODO(), node.Name,
types.StrategicMergePatchType, patchBytes, metav1.PatchOptions{})
} else {
		log.WithError(patchErr).Warn("Failed to create patch for node object, falling back to update.")
_, err = client.Update(context.TODO(), node, metav1.UpdateOptions{})
}
return rigging.ConvertError(err)
}
// updateTaints updates taints on the node given with nodeName from newTaints
func updateTaints(client corev1.NodeInterface, nodeName string, newTaints []v1.Taint) error {
node, err := client.Get(context.TODO(), nodeName, metav1.GetOptions{})
if err != nil {
return trace.Wrap(err)
}
node.Spec.Taints = newTaints
_, err = client.Update(context.TODO(), node, metav1.UpdateOptions{})
return rigging.ConvertError(err)
}
// updateLabels updates labels on the node specified with nodeName
func updateLabels(client corev1.NodeInterface, nodeName string, labels map[string]string) error {
node, err := client.Get(context.TODO(), nodeName, metav1.GetOptions{})
if err != nil {
return trace.Wrap(err)
}
for name, value := range labels {
node.Labels[name] = value
}
_, err = client.Update(context.TODO(), node, metav1.UpdateOptions{})
return rigging.ConvertError(err)
}
// deleteTaints deletes the given taints from the node's list of taints
func deleteTaints(taintsToDelete []v1.Taint, newTaints *[]v1.Taint) (deleted bool, err error) {
var errors []error
for _, taintToDelete := range taintsToDelete {
deleted = false
if len(taintToDelete.Effect) > 0 {
*newTaints, deleted = deleteTaint(*newTaints, &taintToDelete)
} else {
*newTaints, deleted = deleteTaintsByKey(*newTaints, taintToDelete.Key)
}
if !deleted {
errors = append(errors, trace.NotFound("taint %q not found", taintToDelete.ToString()))
}
}
if len(errors) != 0 {
if len(errors) == 1 {
return false, trace.Wrap(errors[0])
}
return false, trace.NewAggregate(errors...)
}
return true, nil
}
// deleteTaintsByKey removes all the taints that have the same key to given taintKey
func deleteTaintsByKey(taints []v1.Taint, taintKey string) (result []v1.Taint, deleted bool) {
for _, taint := range taints {
if taintKey == taint.Key {
deleted = true
continue
}
result = append(result, taint)
}
return result, deleted
}
// deleteTaint removes all the taints that have the same key and effect to given taintToDelete.
func deleteTaint(taints []v1.Taint, taintToDelete *v1.Taint) (result []v1.Taint, deleted bool) {
for i := range taints {
if taintToDelete.MatchTaint(&taints[i]) {
deleted = true
continue
}
result = append(result, taints[i])
}
return result, deleted
}
// addTaints adds the newTaints list to existing ones and updates the newTaints list.
// TODO: This needs a rewrite to take only the new values instead of appended newTaints list to be consistent.
func addTaints(oldTaints []v1.Taint, newTaints *[]v1.Taint) bool {
for _, oldTaint := range oldTaints {
existsInNew := false
for _, taint := range *newTaints {
if taint.MatchTaint(&oldTaint) {
existsInNew = true
break
}
}
if !existsInNew {
*newTaints = append(*newTaints, oldTaint)
}
}
return len(oldTaints) != len(*newTaints)
}
// Retry retries the specified function fn, using RetryOnUpdateConflict to
// decide whether a particular error is retryable, with exponential backoff.
// Returns the first permanent error
func Retry(ctx context.Context, fn func() error) error {
interval := backoff.NewExponentialBackOff()
err := utils.RetryWithInterval(ctx, interval, func() error {
return RetryOnUpdateConflict(fn())
})
return trace.Wrap(err)
}
| {
err := SetUnschedulable(ctx, client.CoreV1().Nodes(), nodeName, true)
if err != nil {
return trace.Wrap(err)
}
d := drainer{
client: client,
nodeName: nodeName,
gracePeriodSeconds: defaults.ResourceGracePeriod,
}
err = d.drainPods(ctx)
return trace.Wrap(err)
} |
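// A minimal caller sketch, assuming it sits in the same package as Drain
// above and that the (hypothetical) kubeconfig path grants node and pod
// permissions; Drain cordons the node via SetUnschedulable and then evicts
// or deletes its pods:
package kubernetes

import (
	"context"

	"github.com/gravitational/trace"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func drainExample(ctx context.Context, kubeconfigPath, nodeName string) error {
	cfg, err := clientcmd.BuildConfigFromFlags("", kubeconfigPath)
	if err != nil {
		return trace.Wrap(err)
	}
	client, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		return trace.Wrap(err)
	}
	// Cordon first, then drain pods with the package's default grace period.
	return trace.Wrap(Drain(ctx, client, nodeName))
}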