file_name
stringlengths 3
137
| prefix
stringlengths 0
918k
| suffix
stringlengths 0
962k
| middle
stringlengths 0
812k
|
---|---|---|---|
validator.go
|
// SPDX-FileCopyrightText: 2020 SAP SE or an SAP affiliate company and Gardener contributors
//
// SPDX-License-Identifier: Apache-2.0
package reactor
import (
"context"
"errors"
"fmt"
"github.com/gardener/docforge/pkg/jobs"
"github.com/gardener/docforge/pkg/resourcehandlers"
"github.com/gardener/docforge/pkg/util/httpclient"
"github.com/gardener/docforge/pkg/util/urls"
"k8s.io/klog/v2"
"math/rand"
"net/http"
"reflect"
"strings"
"time"
)
//counterfeiter:generate . Validator
type Validator interface {
// ValidateLink checks if the link URL is available in a separate goroutine
// returns true if the task was added for processing, false if it was skipped
ValidateLink(linkUrl *urls.URL, linkDestination, contentSourcePath string) bool
}
type validator struct {
queue *jobs.JobQueue
}
func
|
(queue *jobs.JobQueue) Validator {
return &validator{
queue: queue,
}
}
func (v *validator) ValidateLink(linkUrl *urls.URL, linkDestination, contentSourcePath string) bool {
vTask := &ValidationTask{
LinkUrl: linkUrl,
LinkDestination: linkDestination,
ContentSourcePath: contentSourcePath,
}
added := v.queue.AddTask(vTask)
if !added {
klog.Warningf("link validation failed for task %v\n", vTask)
}
return added
}
type ValidationTask struct {
LinkUrl *urls.URL
LinkDestination string
ContentSourcePath string
}
type validatorWorker struct {
httpClient httpclient.Client
resourceHandlers resourcehandlers.Registry
}
// Validate checks if validationTask.LinkUrl is available and if it cannot be reached, a warning is logged
func (v *validatorWorker) Validate(ctx context.Context, task interface{}) error {
if vTask, ok := task.(*ValidationTask); ok {
// ignore sample hosts e.g. localhost
host := vTask.LinkUrl.Hostname()
if host == "localhost" || host == "127.0.0.1" || host == "1.2.3.4" || strings.Contains(host, "foo.bar") {
return nil
}
client := v.httpClient
// check if link absolute destination exists locally
absLinkDestination := vTask.LinkUrl.String()
handler := v.resourceHandlers.Get(absLinkDestination)
if handler != nil {
if _, err := handler.BuildAbsLink(vTask.ContentSourcePath, absLinkDestination); err == nil {
// no ErrResourceNotFound -> absolute destination exists locally
return nil
}
// get appropriate http client, if any
if handlerClient := handler.GetClient(); handlerClient != nil {
client = handlerClient
}
}
var (
req *http.Request
resp *http.Response
err error
)
// try HEAD
req, err = http.NewRequestWithContext(ctx, http.MethodHead, vTask.LinkUrl.String(), nil)
if err != nil {
return fmt.Errorf("failed to prepare HEAD validation request: %v", err)
}
resp, err = doValidation(req, client)
if err != nil {
klog.Warningf("failed to validate absolute link for %s from source %s: %v\n",
vTask.LinkDestination, vTask.ContentSourcePath, err)
return nil
}
// on error status code different from authorization error
if resp.StatusCode >= 400 && resp.StatusCode != http.StatusForbidden && resp.StatusCode != http.StatusUnauthorized {
req, err = http.NewRequestWithContext(ctx, http.MethodGet, vTask.LinkUrl.String(), nil)
if err != nil {
return fmt.Errorf("failed to prepare GET validation request: %v", err)
}
// retry GET
resp, err = doValidation(req, client)
if err != nil {
klog.Warningf("failed to validate absolute link for %s from source %s: %v\n",
vTask.LinkDestination, vTask.ContentSourcePath, err)
return nil
}
if resp.StatusCode >= 400 && resp.StatusCode != http.StatusForbidden && resp.StatusCode != http.StatusUnauthorized {
klog.Warningf("failed to validate absolute link for %s from source %s: %v\n",
vTask.LinkDestination, vTask.ContentSourcePath, fmt.Errorf("HTTP Status %s", resp.Status))
}
}
} else {
return fmt.Errorf("incorrect validation task: %T\n", task)
}
return nil
}
// doValidation performs several attempts to execute http request if http status code is 429
func doValidation(req *http.Request, client httpclient.Client) (*http.Response, error) {
intervals := []int{1, 5, 10, 20}
resp, err := client.Do(req)
if err != nil {
return resp, err
}
_ = resp.Body.Close()
attempts := 0
for resp.StatusCode == http.StatusTooManyRequests && attempts < len(intervals)-1 {
time.Sleep(time.Duration(intervals[attempts]+rand.Intn(attempts+1)) * time.Second)
resp, err = client.Do(req)
if err != nil {
return resp, err
}
_ = resp.Body.Close()
attempts++
}
return resp, err
}
func ValidateWorkerFunc(httpClient httpclient.Client, resourceHandlers resourcehandlers.Registry) (jobs.WorkerFunc, error) {
if httpClient == nil || reflect.ValueOf(httpClient).IsNil() {
return nil, errors.New("invalid argument: httpClient is nil")
}
if resourceHandlers == nil || reflect.ValueOf(resourceHandlers).IsNil() {
return nil, errors.New("invalid argument: resourceHandlers is nil")
}
vWorker := &validatorWorker{
httpClient: httpClient,
resourceHandlers: resourceHandlers,
}
return vWorker.Validate, nil
}
|
NewValidator
|
traceplusunittest.py
|
# Copyright (C) 2018 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Includes locals in the stacktrace when a failure occurs.
#
# Example use:
#
# if __name__ == '__main__':
# try:
# import traceplusunittest
# except ImportError:
# unittest.main()
# else:
# traceplusunittest.main()
import unittest
from . import traceplus
import traceback
import inspect
class TextBigStackTestRunner(unittest.TextTestRunner):
def _makeResult(self):
return TextBigStackTestResult(self.stream, self.descriptions, self.verbosity)
class TextBigStackTestResult(unittest.TextTestResult):
|
def format_exception(exctype, value, tb, length=None):
frame_records = inspect.getinnerframes(tb, 3)
dump = []
if length is None:
dump.extend(traceplus.make_expanded_trace(frame_records))
else:
dump.extend(traceplus.make_expanded_trace(frame_records[:length]))
dump.extend(traceback.format_exception_only(exctype, value))
return ''.join(dump)
def main():
unittest.main(testRunner=TextBigStackTestRunner())
|
def _exc_info_to_string(self, err, test):
"""Converts a sys.exc_info()-style tuple of values into a string."""
exctype, value, tb = err
# Skip test runner traceback levels
while tb and self._is_relevant_tb_level(tb):
tb = tb.tb_next
if exctype is test.failureException:
# Skip assert*() traceback levels
length = self._count_relevant_tb_levels(tb)
return ''.join(format_exception(exctype, value, tb, length))
return ''.join(format_exception(exctype, value, tb))
|
getAutomationsIdEmailsId.js
|
/**
* Auto-generated action file for "Mailchimp Marketing API" API.
*
* Generated at: 2021-05-20T15:00:33.658Z
* Mass generator version: 1.0.0
*
* : Mailchimp-Component
* Copyright © 2020, AG
*
* All files of this connector are licensed under the Apache 2.0 License. For details
* see the file LICENSE on the toplevel directory.
*
*
* Operation: 'getAutomationsIdEmailsId'
* Endpoint Path: '/automations/{workflow_id}/emails/{workflow_email_id}'
* Method: 'get'
*
*/
// how to pass the transformation function... no need
// pass the meta data
// create a new Object
// emit the message with the new emit function
// securities and auth methods
// check how to make the new ferryman and its transform functions functional // no need
const Swagger = require('swagger-client');
const spec = require('../spec.json');
// this wrapers offers a simplified emitData(data) function
module.exports = {process: processAction};
// parameter names for this call
const PARAMETERS = [
"workflow_id",
"workflow_email_id"
];
// mappings from connector field names to API field names
const FIELD_MAP = {
"workflow_id": "workflow_id",
"workflow_email_id": "workflow_email_id"
};
function p
|
msg, cfg) {
var isVerbose = process.env.debug || cfg.verbose;
console.log("msg:",msg);
console.log("cfg:",cfg)
if (isVerbose) {
console.log(`---MSG: ${JSON.stringify(msg)}`);
console.log(`---CFG: ${JSON.stringify(cfg)}`);
console.log(`---ENV: ${JSON.stringify(process.env)}`);
}
const contentType = undefined;
const body = msg.data;
mapFieldNames(body);
let parameters = {};
for(let param of PARAMETERS) {
parameters[param] = body[param];
}
const oihUid = msg.metadata !== undefined && msg.metadata.oihUid !== undefined
? msg.metadata.oihUid
: 'oihUid not set yet';
const recordUid = msg.metadata !== undefined && msg.metadata.recordUid !== undefined
? msg.metadata.recordUid
: undefined;
const applicationUid = msg.metadata !== undefined && msg.metadata.applicationUid !== undefined
? msg.metadata.applicationUid
: undefined;
const newElement = {};
const oihMeta = {
applicationUid,
oihUid,
recordUid,
};
// credentials for this operation
let securities = {};
securities['basicAuth'] = {username: cfg.auth_username, password: cfg.auth_password};;
if(cfg.otherServer){
if(!spec.servers){
spec.servers = [];
}
spec.servers.push({"url":cfg.otherServer})
}
let callParams = {
spec: spec,
operationId: 'getAutomationsIdEmailsId',
pathName: '/automations/{workflow_id}/emails/{workflow_email_id}',
method: 'get',
parameters: parameters,
requestContentType: contentType,
requestBody: body,
securities: {authorized: securities},
server: spec.servers[cfg.server] || cfg.otherServer,
};
if(callParams.method === 'get'){
delete callParams.requestBody;
}
if (isVerbose) {
let out = Object.assign({}, callParams);
out.spec = '[omitted]';
console.log(`--SWAGGER CALL: ${JSON.stringify(out)}`);
}
// Call operation via Swagger client
return Swagger.execute(callParams).then(data => {
// emit a single message with data
// console.log("swagger data:",data);
delete data.uid;
newElement.metadata = oihMeta;
newElement.data = data.data
this.emit("data",newElement);
// if the response contains an array of entities, you can emit them one by one:
// data.obj.someItems.forEach((item) => {
// this.emitData(item);
// }
});
}
function mapFieldNames(obj) {
if(Array.isArray(obj)) {
obj.forEach(mapFieldNames);
}
else if(typeof obj === 'object' && obj) {
Object.keys(obj).forEach(key => {
mapFieldNames(obj[key]);
let goodKey = FIELD_MAP[key];
if(goodKey && goodKey !== key) {
obj[goodKey] = obj[key];
delete obj[key];
}
});
}
}
|
rocessAction(
|
search.rs
|
use super::structs::*;
use crate::searcher::*;
use crate::IndexFromFile;
use std::collections::HashMap;
use std::convert::TryInto;
fn deserialize(index: &IndexFromFile) -> Index {
let (version_size_bytes, rest) = index.split_at(std::mem::size_of::<u64>());
let version_size = u64::from_be_bytes(version_size_bytes.try_into().unwrap());
let (_version_bytes, rest) = rest.split_at(version_size as usize);
let (entries_size_bytes, rest) = rest.split_at(std::mem::size_of::<u64>());
let entries_size = u64::from_be_bytes(entries_size_bytes.try_into().unwrap());
let (entries_bytes, rest) = rest.split_at(entries_size as usize);
let entries = bincode::deserialize(entries_bytes).unwrap();
let (queries_size_bytes, rest) = rest.split_at(std::mem::size_of::<u64>());
let queries_size = u64::from_be_bytes(queries_size_bytes.try_into().unwrap());
let (queries_bytes, _rest) = rest.split_at(queries_size as usize);
let queries = bincode::deserialize(queries_bytes).unwrap();
Index { entries, queries }
}
impl OutputResult {
fn from(entry: &Entry, results: Vec<SearchResult>) -> OutputResult {
let entry_contents_by_word: Vec<String> = entry
.contents
.split_whitespace()
.map(|w| w.to_string())
.collect();
let entry_contents_by_word_len = entry_contents_by_word.len();
OutputResult {
entry: OutputEntry {
title: entry.title.clone(),
url: entry.url.clone(),
fields: entry.fields.clone(),
},
excerpts: results
.iter()
// sort by score
.map(|result| &result.excerpts)
.flatten()
.map(|excerpt| {
crate::searcher::Excerpt {
text: entry_contents_by_word[excerpt.word_index.saturating_sub(8)
..std::cmp::min(
excerpt.word_index.saturating_add(8),
entry_contents_by_word_len,
)]
.join(" "),
highlight_char_offset: entry_contents_by_word
[excerpt.word_index.saturating_sub(8)..excerpt.word_index]
.join(" ")
.len()
+ 1,
}
})
.collect(),
title_highlight_char_offset: None,
}
}
}
pub fn
|
(index: &IndexFromFile, query: &str) -> SearchOutput {
let index: Index = deserialize(index);
let normalized_query = query.to_lowercase();
let words_in_query: Vec<String> = normalized_query.split(' ').map(|s| s.to_string()).collect();
let first_word = &words_in_query[0];
let container: Container = index.queries.get(first_word).unwrap().to_owned();
#[derive(Debug)]
struct ScoreToContainer {
score: Score,
container: Container,
}
let mut aliased_results = container
.aliases
.keys()
.map(|alias| ScoreToContainer {
score: container.aliases.get(alias).unwrap().to_owned(),
container: index.queries.get(alias).unwrap().to_owned(),
})
.map(|score_to_container| {
let mut results = score_to_container.container.results;
for result in results.values_mut() {
result.score = score_to_container.score
}
results
})
.collect::<Vec<HashMap<EntryIndex, SearchResult>>>();
let mut all_results: Vec<HashMap<EntryIndex, SearchResult>> = vec![];
if !container.results.is_empty() {
all_results.append(&mut vec![container.results])
}
if !aliased_results.is_empty() {
all_results.append(&mut aliased_results);
}
let mut combined_results: HashMap<EntryIndex, Vec<SearchResult>> = HashMap::new();
for result_map in &all_results {
for (index, result) in result_map.iter() {
let result_vec = combined_results.entry(*index).or_insert_with(Vec::new);
result_vec.push(result.to_owned());
}
}
let output_results: Vec<OutputResult> = combined_results
.iter()
.map(|(entry_index, results)| {
let entry = &index.entries[*entry_index];
OutputResult::from(entry, results.to_vec())
})
.collect();
SearchOutput {
results: output_results[0..std::cmp::min(output_results.len(), 10)].to_vec(),
total_hit_count: output_results.len(),
}
}
|
search
|
fes2.py
|
# -*- coding: ISO-8859-15 -*-
# =============================================================================
# Copyright (c) 2021 Tom Kralidis
#
# Authors : Tom Kralidis <[email protected]>
#
# Contact email: [email protected]
# =============================================================================
"""
API for OGC Filter Encoding (FE) constructs and metadata.
Filter Encoding: http://www.opengeospatial.org/standards/filter
Supports version 2.0.2 (09-026r2).
"""
from owslib.etree import etree
from owslib import util
from owslib.namespaces import Namespaces
# default variables
def get_namespaces():
n = Namespaces()
ns = n.get_namespaces(["dif", "fes", "gml", "ogc", "ows110", "xs", "xsi"])
ns[None] = n.get_namespace("fes")
return ns
namespaces = get_namespaces()
schema = 'http://schemas.opengis.net/filter/2.0/filterAll.xsd'
schema_location = '%s %s' % (namespaces['fes'], schema)
class FilterRequest(object):
""" filter class """
def __init__(self, parent=None, version='2.0.0'):
"""
filter Constructor
Parameters
----------
- parent: parent etree.Element object (default is None)
- version: version (default is '2.0.0')
"""
self.version = version
self._root = etree.Element(util.nspath_eval('fes:Filter', namespaces))
if parent is not None:
self._root.set(util.nspath_eval('xsi:schemaLocation', namespaces), schema_location)
def set(self, parent=False, qtype=None, keywords=[], typenames='csw:Record', propertyname='csw:AnyText', bbox=None,
identifier=None):
"""
Construct and process a GetRecords request
Parameters
----------
- parent: the parent Element object. If this is not, then generate a standalone request
- qtype: type of resource to query (i.e. service, dataset)
- keywords: list of keywords
- propertyname: the ValueReference to Filter against
- bbox: the bounding box of the spatial query in the form [minx,miny,maxx,maxy]
- identifier: the dc:identifier to query against with a PropertyIsEqualTo. Ignores all other inputs.
"""
# Set the identifier if passed. Ignore other parameters
dc_identifier_equals_filter = None
if identifier is not None:
dc_identifier_equals_filter = PropertyIsEqualTo('dc:identifier', identifier)
self._root.append(dc_identifier_equals_filter.toXML())
return self._root
# Set the query type if passed
dc_type_equals_filter = None
if qtype is not None:
dc_type_equals_filter = PropertyIsEqualTo('dc:type', qtype)
# Set a bbox query if passed
bbox_filter = None
if bbox is not None:
bbox_filter = BBox(bbox)
# Set a keyword query if passed
keyword_filter = None
if len(keywords) > 0:
if len(keywords) > 1: # loop multiple keywords into an Or
ks = []
for i in keywords:
ks.append(PropertyIsLike(propertyname, "*%s*" % i, wildCard="*"))
keyword_filter = Or(operations=ks)
elif len(keywords) == 1: # one keyword
keyword_filter = PropertyIsLike(propertyname, "*%s*" % keywords[0], wildCard="*")
# And together filters if more than one exists
filters = [_f for _f in [keyword_filter, bbox_filter, dc_type_equals_filter] if _f]
if len(filters) == 1:
self._root.append(filters[0].toXML())
elif len(filters) > 1:
self._root.append(And(operations=filters).toXML())
return self._root
def setConstraint(self, constraint, tostring=False):
"""
Construct and process a GetRecords request
Parameters
----------
- constraint: An OgcExpression object
- tostring (optional): return as string
"""
self._root.append(constraint.toXML())
if tostring:
return util.element_to_string(self._root, xml_declaration=False)
return self._root
def setConstraintList(self, constraints, tostring=False):
"""
Construct and process a GetRecords request
Parameters
----------
- constraints: A list of OgcExpression objects
The list is interpretted like so:
[a,b,c]
a || b || c
[[a,b,c]]
a && b && c
[[a,b],[c],[d],[e]] or [[a,b],c,d,e]
(a && b) || c || d || e
- tostring (optional): return as string
"""
ors = []
if len(constraints) == 1:
if isinstance(constraints[0], OgcExpression):
flt = self.setConstraint(constraints[0])
else:
|
else:
return flt
for c in constraints:
if isinstance(c, OgcExpression):
ors.append(c)
elif isinstance(c, list) or isinstance(c, tuple):
if len(c) == 1:
ors.append(c[0])
elif len(c) >= 2:
ands = []
for sub in c:
if isinstance(sub, OgcExpression):
ands.append(sub)
ors.append(And(operations=ands))
self._root.append(Or(operations=ors).toXML())
if tostring:
return util.element_to_string(self._root, xml_declaration=False)
return self._root
class FilterCapabilities(object):
"""Abstraction for Filter_Capabilities 2.0"""
def __init__(self, elem):
if elem is None:
self.spatial_operands = []
self.spatial_operators = []
self.temporal_operators = []
self.temporal_operands = []
self.scalar_comparison_operators = []
self.conformance = {}
return
# Spatial_Capabilities
self.spatial_operands = [f.attrib.get('name') for f in elem.findall(util.nspath_eval(
'fes:Spatial_Capabilities/fes:GeometryOperands/fes:GeometryOperand', namespaces))]
self.spatial_operators = []
for f in elem.findall(util.nspath_eval(
'fes:Spatial_Capabilities/fes:SpatialOperators/fes:SpatialOperator', namespaces)):
self.spatial_operators.append(f.attrib['name'])
# Temporal_Capabilities
self.temporal_operands = [f.attrib.get('name') for f in elem.findall(util.nspath_eval(
'fes:Temporal_Capabilities/fes:TemporalOperands/fes:TemporalOperand', namespaces))]
self.temporal_operators = []
for f in elem.findall(util.nspath_eval(
'fes:Temporal_Capabilities/fes:TemporalOperators/fes:TemporalOperator', namespaces)):
self.temporal_operators.append(f.attrib['name'])
# Scalar_Capabilities
self.scalar_comparison_operators = [f.text for f in elem.findall(util.nspath_eval(
'fes:Scalar_Capabilities/fes:ComparisonOperators/fes:ComparisonOperator', namespaces))]
# Conformance
self.conformance = {}
for f in elem.findall(util.nspath_eval('fes:Conformance/fes:Constraint', namespaces)):
self.conformance[f.attrib.get('name')] = f.find(util.nspath_eval('ows110:DefaultValue', namespaces)).text
def setsortby(parent, propertyname, order='ASC'):
"""
constructs a SortBy element
Parameters
----------
- parent: parent etree.Element object
- propertyname: the ValueReference
- order: the SortOrder (default is 'ASC')
"""
tmp = etree.SubElement(parent, util.nspath_eval('fes:SortBy', namespaces))
tmp2 = etree.SubElement(tmp, util.nspath_eval('fes:SortProperty', namespaces))
etree.SubElement(tmp2, util.nspath_eval('fes:ValueReference', namespaces)).text = propertyname
etree.SubElement(tmp2, util.nspath_eval('fes:SortOrder', namespaces)).text = order
class SortProperty(object):
def __init__(self, propertyname, order='ASC'):
self.propertyname = propertyname
self.order = order.upper()
if self.order not in ['DESC', 'ASC']:
raise ValueError("SortOrder can only be 'ASC' or 'DESC'")
def toXML(self):
node0 = etree.Element(util.nspath_eval("fes:SortProperty", namespaces))
etree.SubElement(node0, util.nspath_eval('fes:ValueReference', namespaces)).text = self.propertyname
etree.SubElement(node0, util.nspath_eval('fes:SortOrder', namespaces)).text = self.order
return node0
class SortBy(object):
def __init__(self, properties):
self.properties = properties
def toXML(self):
node0 = etree.Element(util.nspath_eval("fes:SortBy", namespaces))
for prop in self.properties:
node0.append(prop.toXML())
return node0
class OgcExpression(object):
def __init__(self):
pass
class BinaryComparisonOpType(OgcExpression):
""" Super class of all the property operation classes"""
def __init__(self, propertyoperator, propertyname, literal, matchcase=True):
self.propertyoperator = propertyoperator
self.propertyname = propertyname
self.literal = literal
self.matchcase = matchcase
def toXML(self):
node0 = etree.Element(util.nspath_eval(self.propertyoperator, namespaces))
if not self.matchcase:
node0.set('matchCase', 'false')
etree.SubElement(node0, util.nspath_eval('fes:ValueReference', namespaces)).text = self.propertyname
etree.SubElement(node0, util.nspath_eval('fes:Literal', namespaces)).text = self.literal
return node0
class PropertyIsEqualTo(BinaryComparisonOpType):
""" PropertyIsEqualTo class"""
def __init__(self, propertyname, literal, matchcase=True):
BinaryComparisonOpType.__init__(self, 'fes:PropertyIsEqualTo', propertyname, literal, matchcase)
class PropertyIsNotEqualTo(BinaryComparisonOpType):
""" PropertyIsNotEqualTo class """
def __init__(self, propertyname, literal, matchcase=True):
BinaryComparisonOpType.__init__(self, 'fes:PropertyIsNotEqualTo', propertyname, literal, matchcase)
class PropertyIsLessThan(BinaryComparisonOpType):
"""PropertyIsLessThan class"""
def __init__(self, propertyname, literal, matchcase=True):
BinaryComparisonOpType.__init__(self, 'fes:PropertyIsLessThan', propertyname, literal, matchcase)
class PropertyIsGreaterThan(BinaryComparisonOpType):
"""PropertyIsGreaterThan class"""
def __init__(self, propertyname, literal, matchcase=True):
BinaryComparisonOpType.__init__(self, 'fes:PropertyIsGreaterThan', propertyname, literal, matchcase)
class PropertyIsLessThanOrEqualTo(BinaryComparisonOpType):
"""PropertyIsLessThanOrEqualTo class"""
def __init__(self, propertyname, literal, matchcase=True):
BinaryComparisonOpType.__init__(self, 'fes:PropertyIsLessThanOrEqualTo', propertyname, literal, matchcase)
class PropertyIsGreaterThanOrEqualTo(BinaryComparisonOpType):
"""PropertyIsGreaterThanOrEqualTo class"""
def __init__(self, propertyname, literal, matchcase=True):
BinaryComparisonOpType.__init__(self, 'fes:PropertyIsGreaterThanOrEqualTo', propertyname, literal, matchcase)
class PropertyIsLike(OgcExpression):
"""PropertyIsLike class"""
def __init__(self, propertyname, literal, escapeChar='\\', singleChar='_', wildCard='%', matchCase=True):
self.propertyname = propertyname
self.literal = literal
self.escapeChar = escapeChar
self.singleChar = singleChar
self.wildCard = wildCard
self.matchCase = matchCase
def toXML(self):
node0 = etree.Element(util.nspath_eval('fes:PropertyIsLike', namespaces))
node0.set('wildCard', self.wildCard)
node0.set('singleChar', self.singleChar)
node0.set('escapeChar', self.escapeChar)
if not self.matchCase:
node0.set('matchCase', 'false')
etree.SubElement(node0, util.nspath_eval('fes:ValueReference', namespaces)).text = self.propertyname
etree.SubElement(node0, util.nspath_eval('fes:Literal', namespaces)).text = self.literal
return node0
class PropertyIsNull(OgcExpression):
"""PropertyIsNull class"""
def __init__(self, propertyname):
self.propertyname = propertyname
def toXML(self):
node0 = etree.Element(util.nspath_eval('fes:PropertyIsNull', namespaces))
etree.SubElement(node0, util.nspath_eval('fes:ValueReference', namespaces)).text = self.propertyname
return node0
class PropertyIsBetween(OgcExpression):
"""PropertyIsBetween class"""
def __init__(self, propertyname, lower, upper):
self.propertyname = propertyname
self.lower = lower
self.upper = upper
def toXML(self):
node0 = etree.Element(util.nspath_eval('fes:PropertyIsBetween', namespaces))
etree.SubElement(node0, util.nspath_eval('fes:ValueReference', namespaces)).text = self.propertyname
node1 = etree.SubElement(node0, util.nspath_eval('fes:LowerBoundary', namespaces))
etree.SubElement(node1, util.nspath_eval('fes:Literal', namespaces)).text = '%s' % self.lower
node2 = etree.SubElement(node0, util.nspath_eval('fes:UpperBoundary', namespaces))
etree.SubElement(node2, util.nspath_eval('fes:Literal', namespaces)).text = '%s' % self.upper
return node0
class BBox(OgcExpression):
"""Construct a BBox, two pairs of coordinates (west-south and east-north)"""
def __init__(self, bbox, crs=None):
self.bbox = bbox
self.crs = crs
def toXML(self):
tmp = etree.Element(util.nspath_eval('fes:BBOX', namespaces))
etree.SubElement(tmp, util.nspath_eval('fes:ValueReference', namespaces)).text = 'ows:BoundingBox'
tmp2 = etree.SubElement(tmp, util.nspath_eval('gml:Envelope', namespaces))
if self.crs is not None:
tmp2.set('srsName', self.crs)
etree.SubElement(tmp2, util.nspath_eval('gml:lowerCorner', namespaces)).text = '{} {}'.format(
self.bbox[0], self.bbox[1])
etree.SubElement(tmp2, util.nspath_eval('gml:upperCorner', namespaces)).text = '{} {}'.format(
self.bbox[2], self.bbox[3])
return tmp
# BINARY
class BinaryLogicOpType(OgcExpression):
""" Binary Operators: And / Or """
def __init__(self, binary_operator, operations):
self.binary_operator = binary_operator
try:
assert len(operations) >= 2
self.operations = operations
except Exception:
raise ValueError("Binary operations (And / Or) require a minimum of two operations to operate against")
def toXML(self):
node0 = etree.Element(util.nspath_eval(self.binary_operator, namespaces))
for op in self.operations:
node0.append(op.toXML())
return node0
class And(BinaryLogicOpType):
def __init__(self, operations):
super(And, self).__init__('fes:And', operations)
class Or(BinaryLogicOpType):
def __init__(self, operations):
super(Or, self).__init__('fes:Or', operations)
# UNARY
class UnaryLogicOpType(OgcExpression):
""" Unary Operator: Not """
def __init__(self, unary_operator, operations):
self.unary_operator = unary_operator
self.operations = operations
def toXML(self):
node0 = etree.Element(util.nspath_eval(self.unary_operator, namespaces))
for op in self.operations:
node0.append(op.toXML())
return node0
class Not(UnaryLogicOpType):
def __init__(self, operations):
super(Not, self).__init__('fes:Not', operations)
|
self._root.append(And(operations=constraints[0]).toXML())
flt = self._root
if tostring:
return util.element_to_string(flt, xml_declaration=False)
|
awsfederatedaccountaccess_controller.go
|
package awsfederatedaccountaccess
import (
"context"
"encoding/json"
"errors"
"fmt"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/awserr"
"github.com/aws/aws-sdk-go/service/iam"
"github.com/aws/aws-sdk-go/service/sts"
"github.com/go-logr/logr"
"k8s.io/apimachinery/pkg/types"
awsv1alpha1 "github.com/openshift/aws-account-operator/pkg/apis/aws/v1alpha1"
"github.com/openshift/aws-account-operator/pkg/awsclient"
controllerutils "github.com/openshift/aws-account-operator/pkg/controller/utils"
corev1 "k8s.io/api/core/v1"
k8serr "k8s.io/apimachinery/pkg/api/errors"
"k8s.io/apimachinery/pkg/runtime"
"sigs.k8s.io/controller-runtime/pkg/client"
"sigs.k8s.io/controller-runtime/pkg/controller"
"sigs.k8s.io/controller-runtime/pkg/handler"
logf "sigs.k8s.io/controller-runtime/pkg/log"
"sigs.k8s.io/controller-runtime/pkg/manager"
"sigs.k8s.io/controller-runtime/pkg/reconcile"
"sigs.k8s.io/controller-runtime/pkg/source"
)
const (
controllerName = "awsfederatedaccountaccess"
)
// Custom errors
// ErrFederatedAccessRoleNotFound indicates the role requested by AWSFederatedAccountAccess Cr was not found as a AWSFederatedRole Cr
var ErrFederatedAccessRoleNotFound = errors.New("FederatedAccessRoleNotFound")
// ErrFederatedAccessRoleFailedCreate indicates that the AWSFederatedRole requested failed to be created in the account requested by the AWSFederatedAccountAccess CR
var ErrFederatedAccessRoleFailedCreate = errors.New("FederatedAccessRoleFailedCreate")
var log = logf.Log.WithName("controller_awsfederatedaccountaccess")
// Add creates a new AWSFederatedAccountAccess Controller and adds it to the Manager. The Manager will set fields on the Controller
// and Start it when the Manager is Started.
func Add(mgr manager.Manager) error {
return add(mgr, newReconciler(mgr))
}
// newReconciler returns a new reconcile.Reconciler
func newReconciler(mgr manager.Manager) reconcile.Reconciler
|
// add adds a new Controller to mgr with r as the reconcile.Reconciler
func add(mgr manager.Manager, r reconcile.Reconciler) error {
// Create a new controller
c, err := controller.New("awsfederatedaccountaccess-controller", mgr, controller.Options{Reconciler: r})
if err != nil {
return err
}
// Watch for changes to primary resource AWSFederatedAccountAccess
err = c.Watch(&source.Kind{Type: &awsv1alpha1.AWSFederatedAccountAccess{}}, &handler.EnqueueRequestForObject{})
if err != nil {
return err
}
return nil
}
// blank assignment to verify that ReconcileAWSFederatedAccountAccess implements reconcile.Reconciler
var _ reconcile.Reconciler = &ReconcileAWSFederatedAccountAccess{}
// ReconcileAWSFederatedAccountAccess reconciles a AWSFederatedAccountAccess object
type ReconcileAWSFederatedAccountAccess struct {
// This client, initialized using mgr.Client() above, is a split client
// that reads objects from the cache and writes to the apiserver
client client.Client
scheme *runtime.Scheme
awsClientBuilder awsclient.IBuilder
}
// Reconcile reads that state of the cluster for a AWSFederatedAccountAccess object and makes changes based on the state read
// and what is in the AWSFederatedAccountAccess.Spec
func (r *ReconcileAWSFederatedAccountAccess) Reconcile(request reconcile.Request) (reconcile.Result, error) {
reqLogger := log.WithValues("Controller", controllerName, "Request.Namespace", request.Namespace, "Request.Name", request.Name)
// Fetch the AWSFederatedAccountAccess instance
currentFAA := &awsv1alpha1.AWSFederatedAccountAccess{}
err := r.client.Get(context.TODO(), request.NamespacedName, currentFAA)
if err != nil {
if k8serr.IsNotFound(err) {
// Request object not found, could have been deleted after reconcile request.
// Owned objects are automatically garbage collected. For additional cleanup logic use finalizers.
// Return and don't requeue
return reconcile.Result{}, nil
}
// Error reading the object - requeue the request.
return reconcile.Result{}, err
}
requestedRole := &awsv1alpha1.AWSFederatedRole{}
err = r.client.Get(context.TODO(), types.NamespacedName{Name: currentFAA.Spec.AWSFederatedRole.Name, Namespace: currentFAA.Spec.AWSFederatedRole.Namespace}, requestedRole)
if err != nil {
if k8serr.IsNotFound(err) {
SetStatuswithCondition(currentFAA, "Requested role does not exist", awsv1alpha1.AWSFederatedAccountFailed, awsv1alpha1.AWSFederatedAccountStateFailed)
reqLogger.Error(ErrFederatedAccessRoleNotFound, fmt.Sprintf("Requested role %s not found", currentFAA.Spec.AWSFederatedRole.Name))
err := r.client.Status().Update(context.TODO(), currentFAA)
if err != nil {
reqLogger.Error(err, fmt.Sprintf("Status update for %s failed", currentFAA.Name))
return reconcile.Result{}, err
}
return reconcile.Result{}, nil
}
// Error reading the object - requeue the request.
return reconcile.Result{}, err
}
// Add finalizer to the CR in case it's not present (e.g. old accounts)
if !controllerutils.Contains(currentFAA.GetFinalizers(), controllerutils.Finalizer) {
err := r.addFinalizer(reqLogger, currentFAA)
if err != nil {
return reconcile.Result{}, err
}
return reconcile.Result{}, nil
}
if currentFAA.DeletionTimestamp != nil {
if controllerutils.Contains(currentFAA.GetFinalizers(), controllerutils.Finalizer) {
reqLogger.Info("Cleaning up FederatedAccountAccess Roles")
err = r.cleanFederatedRoles(reqLogger, currentFAA, requestedRole)
if err != nil {
return reconcile.Result{}, err
}
reqLogger.Info("Removing Finalizer")
err = r.removeFinalizer(reqLogger, currentFAA, controllerutils.Finalizer)
if err != nil {
return reconcile.Result{}, err
}
}
}
// If the state is ready or failed don't do anything
if currentFAA.Status.State == awsv1alpha1.AWSFederatedAccountStateReady || currentFAA.Status.State == awsv1alpha1.AWSFederatedAccountStateFailed {
return reconcile.Result{}, nil
}
// Check if the FAA has the uid label
if !hasLabel(currentFAA, awsv1alpha1.UIDLabel) {
// Generate a new UID
uid := controllerutils.GenerateShortUID()
reqLogger.Info(fmt.Sprintf("Adding UID %s to AccountAccess %s", uid, currentFAA.Name))
newLabel := map[string]string{"uid": uid}
// Join the new UID label with any current labels
if currentFAA.Labels != nil {
currentFAA.Labels = controllerutils.JoinLabelMaps(currentFAA.Labels, newLabel)
} else {
currentFAA.Labels = newLabel
}
// Update the CR with new labels
err = r.client.Update(context.TODO(), currentFAA)
if err != nil {
reqLogger.Error(err, fmt.Sprintf("Lable update for %s failed", currentFAA.Name))
return reconcile.Result{}, err
}
}
uidLabel, ok := currentFAA.Labels[awsv1alpha1.UIDLabel]
if !ok {
return reconcile.Result{}, err
}
// Get aws client
awsClient, err := r.awsClientBuilder.GetClient(controllerName, r.client, awsclient.NewAwsClientInput{
SecretName: currentFAA.Spec.AWSCustomerCredentialSecret.Name,
NameSpace: currentFAA.Spec.AWSCustomerCredentialSecret.Namespace,
AwsRegion: "us-east-1",
})
if err != nil {
reqLogger.Error(err, "Unable to create aws client for region ")
return reconcile.Result{}, err
}
// Get account number of cluster account
gciOut, err := awsClient.GetCallerIdentity(&sts.GetCallerIdentityInput{})
if err != nil {
SetStatuswithCondition(currentFAA, "Failed to get account ID information", awsv1alpha1.AWSFederatedAccountFailed, awsv1alpha1.AWSFederatedAccountStateFailed)
controllerutils.LogAwsError(log, fmt.Sprintf("Failed to get account ID information for '%s'", currentFAA.Name), err, err)
err := r.client.Status().Update(context.TODO(), currentFAA)
if err != nil {
reqLogger.Error(err, fmt.Sprintf("Status update for %s failed", currentFAA.Name))
return reconcile.Result{}, err
}
return reconcile.Result{}, err
}
accountID := *gciOut.Account // Add requested aws managed policies to the role
if !hasLabel(currentFAA, awsv1alpha1.AccountIDLabel) {
reqLogger.Info(fmt.Sprintf("Adding awsAccountID %s to AccountAccess %s", accountID, currentFAA.Name))
newLabel := map[string]string{"awsAccountID": accountID}
// Join the new UID label with any current labels
if currentFAA.Labels != nil {
currentFAA.Labels = controllerutils.JoinLabelMaps(currentFAA.Labels, newLabel)
} else {
currentFAA.Labels = newLabel
}
// Update the CR with new labels
err = r.client.Update(context.TODO(), currentFAA)
if err != nil {
reqLogger.Error(err, fmt.Sprintf("Label update for %s failed", currentFAA.Name))
return reconcile.Result{}, err
}
}
// Here create the custom policy in the cluster account
err = r.createOrUpdateIAMPolicy(awsClient, *requestedRole, *currentFAA)
if err != nil {
// if we were unable to create the policy fail this CR.
SetStatuswithCondition(currentFAA, "Failed to create custom policy", awsv1alpha1.AWSFederatedAccountFailed, awsv1alpha1.AWSFederatedAccountStateFailed)
reqLogger.Error(err, fmt.Sprintf("Unable to create policy requested by '%s'", currentFAA.Name))
err := r.client.Status().Update(context.TODO(), currentFAA)
if err != nil {
reqLogger.Error(err, fmt.Sprintf("Status update for %s failed", currentFAA.Name))
return reconcile.Result{}, err
}
return reconcile.Result{}, nil
}
// Create role and apply custom policies and awsmanagedpolicies
role, err := r.createOrUpdateIAMRole(awsClient, *requestedRole, *currentFAA, reqLogger)
if err != nil {
SetStatuswithCondition(currentFAA, "Failed to create role", awsv1alpha1.AWSFederatedAccountFailed, awsv1alpha1.AWSFederatedAccountStateFailed)
reqLogger.Error(ErrFederatedAccessRoleFailedCreate, fmt.Sprintf("Unable to create role requested by '%s'", currentFAA.Name), "AWS ERROR: ", err)
err := r.client.Status().Update(context.TODO(), currentFAA)
if err != nil {
reqLogger.Error(err, fmt.Sprintf("Status update for %s failed", currentFAA.Name))
return reconcile.Result{}, err
}
return reconcile.Result{}, nil
}
currentFAA.Status.ConsoleURL = fmt.Sprintf("https://signin.aws.amazon.com/switchrole?account=%s&roleName=%s", accountID, *role.RoleName)
awsManagedPolicyNames := []string{}
// Add all aws managed policy names to a array
awsManagedPolicyNames = append(awsManagedPolicyNames, requestedRole.Spec.AWSManagedPolicies...)
// Get policy arns for managed policies
policyArns := createPolicyArns(accountID, awsManagedPolicyNames, true)
// Get custom policy arns
customPolicy := []string{requestedRole.Spec.AWSCustomPolicy.Name + "-" + uidLabel}
customerPolArns := createPolicyArns(accountID, customPolicy, false)
policyArns = append(policyArns, customerPolArns[0])
// Attach the requested policy to the newly created role
err = r.attachIAMPolices(awsClient, currentFAA.Spec.AWSFederatedRole.Name+"-"+uidLabel, policyArns)
if err != nil {
//TODO() role should be deleted here so that we leave nothing behind.
SetStatuswithCondition(currentFAA, "Failed to attach policies to role", awsv1alpha1.AWSFederatedAccountFailed, awsv1alpha1.AWSFederatedAccountStateFailed)
reqLogger.Error(err, fmt.Sprintf("Failed to attach policies to role requested by '%s'", currentFAA.Name))
err := r.client.Status().Update(context.TODO(), currentFAA)
if err != nil {
reqLogger.Error(err, fmt.Sprintf("Status update for %s failed", currentFAA.Name))
return reconcile.Result{}, err
}
return reconcile.Result{}, nil
}
// Mark AWSFederatedAccountAccess CR as Ready.
SetStatuswithCondition(currentFAA, "Account Access Ready", awsv1alpha1.AWSFederatedAccountReady, awsv1alpha1.AWSFederatedAccountStateReady)
reqLogger.Info(fmt.Sprintf("Successfully applied %s", currentFAA.Name))
err = r.client.Status().Update(context.TODO(), currentFAA)
if err != nil {
reqLogger.Error(err, fmt.Sprintf("Status update for %s failed", currentFAA.Name))
return reconcile.Result{}, err
}
return reconcile.Result{}, nil
}
// createIAMPolicy creates the IAM policys in AWSFederatedRole inside of our cluster account
func (r *ReconcileAWSFederatedAccountAccess) createIAMPolicy(awsClient awsclient.Client, afr awsv1alpha1.AWSFederatedRole, afaa awsv1alpha1.AWSFederatedAccountAccess) (*iam.Policy, error) {
// Same struct from the afr.Spec.AWSCustomPolicy.Statements , but with json tags as capitals due to requirements for the policydoc
type awsStatement struct {
Effect string `json:"Effect"`
Action []string `json:"Action"`
Resource []string `json:"Resource,omitempty"`
Condition *awsv1alpha1.Condition `json:"Condition,omitempty"`
Principal *awsv1alpha1.Principal `json:"Principal,omitempty"`
}
statements := []awsStatement{}
for _, sm := range afr.Spec.AWSCustomPolicy.Statements {
var a awsStatement = awsStatement(sm)
statements = append(statements, a)
}
// Create an aws policydoc formated struct
policyDoc := struct {
Version string
Statement []awsStatement
}{
Version: "2012-10-17",
Statement: statements,
}
// Marshal policydoc to json
jsonPolicyDoc, err := json.Marshal(&policyDoc)
if err != nil {
return &iam.Policy{}, fmt.Errorf("Error marshalling jsonPolicy doc : Error %s", err.Error())
}
var policyName string
// Try and build policy name
if uidLabel, ok := afaa.Labels["uid"]; ok {
policyName = afr.Spec.AWSCustomPolicy.Name + "-" + uidLabel
} else {
// Just in case the UID somehow doesn't exist
return nil, err
}
output, err := awsClient.CreatePolicy(&iam.CreatePolicyInput{
PolicyName: aws.String(policyName),
Description: aws.String(afr.Spec.AWSCustomPolicy.Description),
PolicyDocument: aws.String(string(jsonPolicyDoc)),
})
if err != nil {
return nil, err
}
return output.Policy, nil
}
func (r *ReconcileAWSFederatedAccountAccess) createIAMRole(awsClient awsclient.Client, afr awsv1alpha1.AWSFederatedRole, afaa awsv1alpha1.AWSFederatedAccountAccess) (*iam.Role, error) {
type awsStatement struct {
Effect string `json:"Effect"`
Action []string `json:"Action"`
Resource []string `json:"Resource,omitempty"`
Principal *awsv1alpha1.Principal `json:"Principal,omitempty"`
}
assumeRolePolicyDoc := struct {
Version string
Statement []awsStatement
}{
Version: "2012-10-17",
Statement: []awsStatement{{
Effect: "Allow",
Action: []string{"sts:AssumeRole"},
Principal: &awsv1alpha1.Principal{
AWS: []string{afaa.Spec.ExternalCustomerAWSIAMARN},
},
}},
}
// Marshal assumeRolePolicyDoc to json
jsonAssumeRolePolicyDoc, err := json.Marshal(&assumeRolePolicyDoc)
if err != nil {
return nil, err
}
var roleName string
// Try and build role name
if uidLabel, ok := afaa.Labels["uid"]; ok {
roleName = afr.Name + "-" + uidLabel
} else {
// Just in case the UID somehow doesn't exist
return nil, err
}
createRoleOutput, err := awsClient.CreateRole(&iam.CreateRoleInput{
RoleName: aws.String(roleName),
Description: aws.String(afr.Spec.RoleDescription),
AssumeRolePolicyDocument: aws.String(string(jsonAssumeRolePolicyDoc)),
})
if err != nil {
return nil, err
}
return createRoleOutput.Role, nil
}
func (r *ReconcileAWSFederatedAccountAccess) createOrUpdateIAMPolicy(awsClient awsclient.Client, afr awsv1alpha1.AWSFederatedRole, afaa awsv1alpha1.AWSFederatedAccountAccess) error {
uidLabel, ok := afaa.Labels["uid"]
if !ok {
return errors.New("Unable to get UID label")
}
gciOut, err := awsClient.GetCallerIdentity(&sts.GetCallerIdentityInput{})
if err != nil {
return err
}
customPolArns := createPolicyArns(*gciOut.Account, []string{afr.Spec.AWSCustomPolicy.Name + "-" + uidLabel}, false)
_, err = r.createIAMPolicy(awsClient, afr, afaa)
if err != nil {
if aerr, ok := err.(awserr.Error); ok {
if aerr.Code() == "EntityAlreadyExists" {
_, err = awsClient.DeletePolicy(&iam.DeletePolicyInput{PolicyArn: aws.String(customPolArns[0])})
if err != nil {
return err
}
_, err = r.createIAMPolicy(awsClient, afr, afaa)
if err != nil {
return err
}
}
}
}
return nil
}
func (r *ReconcileAWSFederatedAccountAccess) createOrUpdateIAMRole(awsClient awsclient.Client, afr awsv1alpha1.AWSFederatedRole, afaa awsv1alpha1.AWSFederatedAccountAccess, reqLogger logr.Logger) (*iam.Role, error) {
uidLabel, ok := afaa.Labels["uid"]
if !ok {
return nil, errors.New("Unable to get UID label")
}
roleName := afaa.Spec.AWSFederatedRole.Name + "-" + uidLabel
role, err := r.createIAMRole(awsClient, afr, afaa)
if err != nil {
if aerr, ok := err.(awserr.Error); ok {
switch aerr.Code() {
case "EntityAlreadyExists":
_, err := awsClient.DeleteRole(&iam.DeleteRoleInput{RoleName: aws.String(roleName)})
if err != nil {
return nil, err
}
role, err := r.createIAMRole(awsClient, afr, afaa)
if err != nil {
return nil, err
}
return role, nil
default:
// Handle unexpected AWS API errors
controllerutils.LogAwsError(reqLogger, "createOrUpdateIAMRole: Unexpected AWS Error creating IAM Role", nil, err)
return nil, err
}
}
// Return all other (non-AWS) errors
return nil, err
}
return role, nil
}
func (r *ReconcileAWSFederatedAccountAccess) attachIAMPolices(awsClient awsclient.Client, roleName string, policyArns []string) error {
for _, pol := range policyArns {
_, err := awsClient.AttachRolePolicy(&iam.AttachRolePolicyInput{
PolicyArn: aws.String(pol),
RoleName: aws.String(roleName),
})
if err != nil {
return err
}
}
return nil
}
// Pass in the account id of the account where you the policies live.
func createPolicyArns(accountID string, policyNames []string, awsManaged bool) []string {
awsPolicyArnPrefix := ""
if awsManaged {
awsPolicyArnPrefix = "arn:aws:iam::aws:policy/"
} else {
awsPolicyArnPrefix = fmt.Sprintf("arn:aws:iam::%s:policy/", accountID)
}
policyArns := []string{}
for _, policy := range policyNames {
policyArns = append(policyArns, fmt.Sprintf("%s%s", awsPolicyArnPrefix, policy))
}
return policyArns
}
// SetStatuswithCondition sets the status of an account
func SetStatuswithCondition(afaa *awsv1alpha1.AWSFederatedAccountAccess, message string, ctype awsv1alpha1.AWSFederatedAccountAccessConditionType, state awsv1alpha1.AWSFederatedAccountAccessState) {
afaa.Status.Conditions = controllerutils.SetAWSFederatedAccountAccessCondition(
afaa.Status.Conditions,
ctype,
corev1.ConditionTrue,
string(state),
message,
controllerutils.UpdateConditionNever)
afaa.Status.State = state
}
func (r *ReconcileAWSFederatedAccountAccess) addFinalizer(reqLogger logr.Logger, awsFederatedAccountAccess *awsv1alpha1.AWSFederatedAccountAccess) error {
reqLogger.Info("Adding Finalizer for the AccountClaim")
awsFederatedAccountAccess.SetFinalizers(append(awsFederatedAccountAccess.GetFinalizers(), controllerutils.Finalizer))
// Update CR
err := r.client.Update(context.TODO(), awsFederatedAccountAccess)
if err != nil {
reqLogger.Error(err, "Failed to update AccountClaim with finalizer")
return err
}
return nil
}
func (r *ReconcileAWSFederatedAccountAccess) removeFinalizer(reqLogger logr.Logger, AWSFederatedAccountAccess *awsv1alpha1.AWSFederatedAccountAccess, finalizerName string) error {
reqLogger.Info("Removing Finalizer for the AWSFederatedAccountAccess")
AWSFederatedAccountAccess.SetFinalizers(controllerutils.Remove(AWSFederatedAccountAccess.GetFinalizers(), finalizerName))
// Update CR
err := r.client.Update(context.TODO(), AWSFederatedAccountAccess)
if err != nil {
reqLogger.Error(err, "Failed to remove AWSFederatedAccountAccess finalizer")
return err
}
return nil
}
func (r *ReconcileAWSFederatedAccountAccess) cleanFederatedRoles(reqLogger logr.Logger, currentFAA *awsv1alpha1.AWSFederatedAccountAccess, federatedRoleCR *awsv1alpha1.AWSFederatedRole) error {
// Get the UID
uidLabel, ok := currentFAA.Labels[awsv1alpha1.UIDLabel]
if !ok {
if currentFAA.Status.State != awsv1alpha1.AWSFederatedAccountStateReady {
log.Info("UID Label missing with CR not ready, removing finalizer")
return nil
}
return errors.New("Unable to get UID label")
}
// Get the AWS Account ID
accountIDLabel, ok := currentFAA.Labels[awsv1alpha1.AccountIDLabel]
if !ok {
if currentFAA.Status.State != awsv1alpha1.AWSFederatedAccountStateReady {
log.Info("AWS Account ID Label missing with CR not ready, removing finalizer")
return nil
}
return errors.New("Unable to get AWS Account ID label")
}
roleName := currentFAA.Spec.AWSFederatedRole.Name + "-" + uidLabel
// Build AWS client from root secret
rootAwsClient, err := r.awsClientBuilder.GetClient(controllerName, r.client, awsclient.NewAwsClientInput{
SecretName: "aws-account-operator-credentials",
NameSpace: awsv1alpha1.AccountCrNamespace,
AwsRegion: "us-east-1",
})
if err != nil {
reqLogger.Error(err, "Unable to create root aws client for region ")
return err
}
assumeRoleOutput, err := rootAwsClient.AssumeRole(&sts.AssumeRoleInput{
RoleArn: aws.String(fmt.Sprintf("arn:aws:iam::%s:role/OrganizationAccountAccessRole", accountIDLabel)),
RoleSessionName: aws.String("FederatedRoleCleanup"),
})
if err != nil {
reqLogger.Info("Unable to assume role OrganizationAccountAccessRole, trying BYOCAdminAccess")
// Attempt to assume the BYOCAdminAccess role if OrganizationAccountAccess didn't work
assumeRoleOutput, err = rootAwsClient.AssumeRole(&sts.AssumeRoleInput{
RoleArn: aws.String(fmt.Sprintf("arn:aws:iam::%s:role/BYOCAdminAccess-%s", accountIDLabel, uidLabel)),
RoleSessionName: aws.String("FederatedRoleCleanup"),
})
if err != nil {
reqLogger.Error(err, "Unable to assume role BYOCAdminAccess Role")
return err
}
}
awsClient, err := r.awsClientBuilder.GetClient(controllerName, r.client, awsclient.NewAwsClientInput{
AwsCredsSecretIDKey: *assumeRoleOutput.Credentials.AccessKeyId,
AwsCredsSecretAccessKey: *assumeRoleOutput.Credentials.SecretAccessKey,
AwsToken: *assumeRoleOutput.Credentials.SessionToken,
AwsRegion: "us-east-1",
})
if err != nil {
reqLogger.Error(err, "Unable to create aws client for target linked account in region ")
return err
}
var nextMarker *string
// Paginate through attached policies and attempt to remove them
reqLogger.Info("Detaching Policies")
for {
attachedPolicyOutput, err := awsClient.ListAttachedRolePolicies(&iam.ListAttachedRolePoliciesInput{RoleName: aws.String(roleName), Marker: nextMarker})
if err != nil {
if aerr, ok := err.(awserr.Error); ok {
switch aerr.Code() {
case "NoSuchEntity":
// Delete any custom policies made
err = r.deleteNonAttachedCustomPolicy(reqLogger, awsClient, federatedRoleCR)
if err != nil {
return err
}
return nil
default:
reqLogger.Error(
aerr,
fmt.Sprint(aerr.Error()),
)
reqLogger.Error(err, fmt.Sprintf("%v", err))
return err
}
} else {
reqLogger.Error(err, "NOther error while trying to list policies")
return err
}
}
for _, policy := range attachedPolicyOutput.AttachedPolicies {
_, err = awsClient.DetachRolePolicy(&iam.DetachRolePolicyInput{RoleName: aws.String(roleName), PolicyArn: policy.PolicyArn})
if err != nil {
if aerr, ok := err.(awserr.Error); ok {
switch aerr.Code() {
default:
reqLogger.Error(
aerr,
fmt.Sprint(aerr.Error()),
)
reqLogger.Error(err, fmt.Sprintf("%v", err))
return err
}
} else {
reqLogger.Error(err, "NOther error while trying to detach policies")
return err
}
}
awsCustomPolicyname := federatedRoleCR.Spec.AWSCustomPolicy.Name + "-" + uidLabel
if *policy.PolicyName == awsCustomPolicyname {
_, err = awsClient.DeletePolicy(&iam.DeletePolicyInput{PolicyArn: policy.PolicyArn})
if err != nil {
if aerr, ok := err.(awserr.Error); ok {
switch aerr.Code() {
default:
reqLogger.Error(
aerr,
fmt.Sprint(aerr.Error()),
)
reqLogger.Error(err, fmt.Sprintf("%v", err))
return err
}
} else {
reqLogger.Error(err, "NOther error while trying to detach policies")
return err
}
}
}
}
if *attachedPolicyOutput.IsTruncated {
nextMarker = attachedPolicyOutput.Marker
} else {
break
}
}
// Delete the role
reqLogger.Info("Deleting Role")
_, err = awsClient.DeleteRole(&iam.DeleteRoleInput{RoleName: aws.String(roleName)})
if err != nil {
if aerr, ok := err.(awserr.Error); ok {
switch aerr.Code() {
default:
reqLogger.Error(aerr, fmt.Sprint(aerr.Error()))
return err
}
} else {
reqLogger.Error(err, "NOther error while trying to detach policies")
return err
}
}
return nil
}
func (r *ReconcileAWSFederatedAccountAccess) deleteNonAttachedCustomPolicy(reqLogger logr.Logger, awsClient awsclient.Client, federatedRoleCR *awsv1alpha1.AWSFederatedRole) error {
var policyMarker *string
// Paginate through custom policies
for {
policyListOutput, err := awsClient.ListPolicies(&iam.ListPoliciesInput{Scope: aws.String("Local"), Marker: policyMarker})
if err != nil {
if aerr, ok := err.(awserr.Error); ok {
switch aerr.Code() {
default:
reqLogger.Error(aerr, fmt.Sprint(aerr.Error()))
return err
}
}
return err
}
for _, policy := range policyListOutput.Policies {
if *policy.PolicyName == federatedRoleCR.Spec.AWSCustomPolicy.Name {
_, err = awsClient.DeletePolicy(&iam.DeletePolicyInput{PolicyArn: policy.Arn})
if err != nil {
if aerr, ok := err.(awserr.Error); ok {
switch aerr.Code() {
default:
reqLogger.Error(aerr, fmt.Sprint(aerr.Error()))
return err
}
}
return err
}
}
}
if *policyListOutput.IsTruncated {
policyMarker = policyListOutput.Marker
} else {
break
}
}
return nil
}
func hasLabel(awsFederatedAccountAccess *awsv1alpha1.AWSFederatedAccountAccess, labelKey string) bool {
// Check if the given key exists as a label
if _, ok := awsFederatedAccountAccess.Labels[labelKey]; ok {
return true
}
return false
}
|
{
reconciler := &ReconcileAWSFederatedAccountAccess{
client: controllerutils.NewClientWithMetricsOrDie(log, mgr, controllerName),
scheme: mgr.GetScheme(),
awsClientBuilder: &awsclient.Builder{},
}
return controllerutils.NewReconcilerWithMetrics(reconciler, controllerName)
}
|
sender.go
|
// Code generated by MockGen. DO NOT EDIT.
// Source: github.com/moira-alert/moira (interfaces: Sender)
// Package mock_moira_alert is a generated GoMock package.
package mock_moira_alert
import (
reflect "reflect"
time "time"
gomock "github.com/golang/mock/gomock"
moira "github.com/moira-alert/moira"
)
// MockSender is a mock of Sender interface.
type MockSender struct {
ctrl *gomock.Controller
recorder *MockSenderMockRecorder
}
// MockSenderMockRecorder is the mock recorder for MockSender.
type MockSenderMockRecorder struct {
mock *MockSender
}
// NewMockSender creates a new mock instance.
func
|
(ctrl *gomock.Controller) *MockSender {
mock := &MockSender{ctrl: ctrl}
mock.recorder = &MockSenderMockRecorder{mock}
return mock
}
// EXPECT returns an object that allows the caller to indicate expected use.
func (m *MockSender) EXPECT() *MockSenderMockRecorder {
return m.recorder
}
// Init mocks base method.
func (m *MockSender) Init(arg0 map[string]string, arg1 moira.Logger, arg2 *time.Location, arg3 string) error {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "Init", arg0, arg1, arg2, arg3)
ret0, _ := ret[0].(error)
return ret0
}
// Init indicates an expected call of Init.
func (mr *MockSenderMockRecorder) Init(arg0, arg1, arg2, arg3 interface{}) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Init", reflect.TypeOf((*MockSender)(nil).Init), arg0, arg1, arg2, arg3)
}
// SendEvents mocks base method.
func (m *MockSender) SendEvents(arg0 moira.NotificationEvents, arg1 moira.ContactData, arg2 moira.TriggerData, arg3 [][]byte, arg4 bool) error {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "SendEvents", arg0, arg1, arg2, arg3, arg4)
ret0, _ := ret[0].(error)
return ret0
}
// SendEvents indicates an expected call of SendEvents.
func (mr *MockSenderMockRecorder) SendEvents(arg0, arg1, arg2, arg3, arg4 interface{}) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SendEvents", reflect.TypeOf((*MockSender)(nil).SendEvents), arg0, arg1, arg2, arg3, arg4)
}
|
NewMockSender
|
store_api.rs
|
/*
* OpenAPI Petstore
*
* This is a sample server Petstore server. For this sample, you can use the api key `special-key` to test the authorization filters.
*
* The version of the OpenAPI document: 1.0.0
*
* Generated by: https://openapi-generator.tech
*/
use reqwest;
use crate::apis::ResponseContent;
use super::{Error, configuration};
/// struct for typed errors of method `delete_order`
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum DeleteOrderError {
Status400(),
Status404(),
UnknownValue(serde_json::Value),
}
/// struct for typed errors of method `get_inventory`
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum GetInventoryError {
UnknownValue(serde_json::Value),
}
/// struct for typed errors of method `get_order_by_id`
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum GetOrderByIdError {
Status400(),
Status404(),
UnknownValue(serde_json::Value),
}
/// struct for typed errors of method `place_order`
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum PlaceOrderError {
Status400(),
UnknownValue(serde_json::Value),
}
/// For valid response try integer IDs with value < 1000. Anything above 1000 or nonintegers will generate API errors
pub fn delete_order(configuration: &configuration::Configuration, order_id: &str) -> Result<(), Error<DeleteOrderError>> {
let local_var_client = &configuration.client;
let local_var_uri_str = format!("{}/store/order/{orderId}", configuration.base_path, orderId=crate::apis::urlencode(order_id));
let mut local_var_req_builder = local_var_client.delete(local_var_uri_str.as_str());
if let Some(ref local_var_user_agent) = configuration.user_agent {
local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
}
let local_var_req = local_var_req_builder.build()?;
let mut local_var_resp = local_var_client.execute(local_var_req)?;
let local_var_status = local_var_resp.status();
let local_var_content = local_var_resp.text()?;
if local_var_status.is_success() {
Ok(())
} else {
let local_var_entity: Option<DeleteOrderError> = serde_json::from_str(&local_var_content).ok();
let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
Err(Error::ResponseError(local_var_error))
}
}
/// Returns a map of status codes to quantities
pub fn get_inventory(configuration: &configuration::Configuration, ) -> Result<::std::collections::HashMap<String, i32>, Error<GetInventoryError>> {
let local_var_client = &configuration.client;
let local_var_uri_str = format!("{}/store/inventory", configuration.base_path);
let mut local_var_req_builder = local_var_client.get(local_var_uri_str.as_str());
if let Some(ref local_var_user_agent) = configuration.user_agent {
local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
}
if let Some(ref local_var_apikey) = configuration.api_key {
let local_var_key = local_var_apikey.key.clone();
let local_var_value = match local_var_apikey.prefix {
Some(ref local_var_prefix) => format!("{} {}", local_var_prefix, local_var_key),
None => local_var_key,
};
local_var_req_builder = local_var_req_builder.header("api_key", local_var_value);
};
let local_var_req = local_var_req_builder.build()?;
let mut local_var_resp = local_var_client.execute(local_var_req)?;
let local_var_status = local_var_resp.status();
let local_var_content = local_var_resp.text()?;
if local_var_status.is_success()
|
else {
let local_var_entity: Option<GetInventoryError> = serde_json::from_str(&local_var_content).ok();
let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
Err(Error::ResponseError(local_var_error))
}
}
/// For valid response try integer IDs with value <= 5 or > 10. Other values will generated exceptions
pub fn get_order_by_id(configuration: &configuration::Configuration, order_id: i64) -> Result<crate::models::Order, Error<GetOrderByIdError>> {
let local_var_client = &configuration.client;
let local_var_uri_str = format!("{}/store/order/{orderId}", configuration.base_path, orderId=order_id);
let mut local_var_req_builder = local_var_client.get(local_var_uri_str.as_str());
if let Some(ref local_var_user_agent) = configuration.user_agent {
local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
}
let local_var_req = local_var_req_builder.build()?;
let mut local_var_resp = local_var_client.execute(local_var_req)?;
let local_var_status = local_var_resp.status();
let local_var_content = local_var_resp.text()?;
if local_var_status.is_success() {
serde_json::from_str(&local_var_content).map_err(Error::from)
} else {
let local_var_entity: Option<GetOrderByIdError> = serde_json::from_str(&local_var_content).ok();
let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
Err(Error::ResponseError(local_var_error))
}
}
pub fn place_order(configuration: &configuration::Configuration, body: crate::models::Order) -> Result<crate::models::Order, Error<PlaceOrderError>> {
let local_var_client = &configuration.client;
let local_var_uri_str = format!("{}/store/order", configuration.base_path);
let mut local_var_req_builder = local_var_client.post(local_var_uri_str.as_str());
if let Some(ref local_var_user_agent) = configuration.user_agent {
local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
}
local_var_req_builder = local_var_req_builder.json(&body);
let local_var_req = local_var_req_builder.build()?;
let mut local_var_resp = local_var_client.execute(local_var_req)?;
let local_var_status = local_var_resp.status();
let local_var_content = local_var_resp.text()?;
if local_var_status.is_success() {
serde_json::from_str(&local_var_content).map_err(Error::from)
} else {
let local_var_entity: Option<PlaceOrderError> = serde_json::from_str(&local_var_content).ok();
let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
Err(Error::ResponseError(local_var_error))
}
}
|
{
serde_json::from_str(&local_var_content).map_err(Error::from)
}
|
create.go
|
package create
import (
"fmt"
"net/http"
"path"
"strings"
"github.com/MakeNowJust/heredoc"
"github.com/cli/cli/git"
"github.com/cli/cli/internal/config"
"github.com/cli/cli/internal/ghrepo"
"github.com/cli/cli/internal/run"
"github.com/cli/cli/pkg/cmdutil"
"github.com/cli/cli/pkg/iostreams"
"github.com/cli/cli/pkg/prompt"
"github.com/cli/cli/utils"
"github.com/spf13/cobra"
)
type CreateOptions struct {
HttpClient func() (*http.Client, error)
Config func() (config.Config, error)
IO *iostreams.IOStreams
Name string
Description string
Homepage string
Team string
EnableIssues bool
EnableWiki bool
Public bool
}
func NewCmdCreate(f *cmdutil.Factory, runF func(*CreateOptions) error) *cobra.Command {
opts := &CreateOptions{
IO: f.IOStreams,
HttpClient: f.HttpClient,
Config: f.Config,
}
cmd := &cobra.Command{
Use: "create [<name>]",
Short: "Create a new repository",
Long: `Create a new GitHub repository.`,
Args: cobra.MaximumNArgs(1),
Example: heredoc.Doc(`
# create a repository under your account using the current directory name
$ gh repo create
# create a repository with a specific name
$ gh repo create my-project
# create a repository in an organization
$ gh repo create cli/my-project
`),
Annotations: map[string]string{
"help:arguments": heredoc.Doc(
`A repository can be supplied as an argument in any of the following formats:
- <OWNER/REPO>
- by URL, e.g. "https://github.com/OWNER/REPO"`),
},
RunE: func(cmd *cobra.Command, args []string) error {
if len(args) > 0 {
opts.Name = args[0]
}
if runF != nil {
return runF(opts)
}
return createRun(opts)
},
}
cmd.Flags().StringVarP(&opts.Description, "description", "d", "", "Description of repository")
cmd.Flags().StringVarP(&opts.Homepage, "homepage", "h", "", "Repository home page URL")
cmd.Flags().StringVarP(&opts.Team, "team", "t", "", "The name of the organization team to be granted access")
cmd.Flags().BoolVar(&opts.EnableIssues, "enable-issues", true, "Enable issues in the new repository")
cmd.Flags().BoolVar(&opts.EnableWiki, "enable-wiki", true, "Enable wiki in the new repository")
cmd.Flags().BoolVar(&opts.Public, "public", false, "Make the new repository public (default: private)")
return cmd
}
func createRun(opts *CreateOptions) error {
projectDir, projectDirErr := git.ToplevelDir()
var repoToCreate ghrepo.Interface
if opts.Name != ""
|
else {
if projectDirErr != nil {
return projectDirErr
}
repoToCreate = ghrepo.New("", path.Base(projectDir))
}
visibility := "PRIVATE"
if opts.Public {
visibility = "PUBLIC"
}
input := repoCreateInput{
Name: repoToCreate.RepoName(),
Visibility: visibility,
OwnerID: repoToCreate.RepoOwner(),
TeamID: opts.Team,
Description: opts.Description,
HomepageURL: opts.Homepage,
HasIssuesEnabled: opts.EnableIssues,
HasWikiEnabled: opts.EnableWiki,
}
httpClient, err := opts.HttpClient()
if err != nil {
return err
}
repo, err := repoCreate(httpClient, repoToCreate.RepoHost(), input)
if err != nil {
return err
}
stderr := opts.IO.ErrOut
stdout := opts.IO.Out
isTTY := opts.IO.IsStdoutTTY()
if isTTY {
fmt.Fprintf(stderr, "%s Created repository %s on GitHub\n", utils.GreenCheck(), ghrepo.FullName(repo))
} else {
fmt.Fprintln(stdout, repo.URL)
}
// TODO This is overly wordy and I'd like to streamline this.
cfg, err := opts.Config()
if err != nil {
return err
}
protocol, err := cfg.Get(repo.RepoHost(), "git_protocol")
if err != nil {
return err
}
remoteURL := ghrepo.FormatRemoteURL(repo, protocol)
if projectDirErr == nil {
_, err = git.AddRemote("origin", remoteURL)
if err != nil {
return err
}
if isTTY {
fmt.Fprintf(stderr, "%s Added remote %s\n", utils.GreenCheck(), remoteURL)
}
} else if isTTY {
doSetup := false
err := prompt.Confirm(fmt.Sprintf("Create a local project directory for %s?", ghrepo.FullName(repo)), &doSetup)
if err != nil {
return err
}
if doSetup {
path := repo.Name
gitInit := git.GitCommand("init", path)
gitInit.Stdout = stdout
gitInit.Stderr = stderr
err = run.PrepareCmd(gitInit).Run()
if err != nil {
return err
}
gitRemoteAdd := git.GitCommand("-C", path, "remote", "add", "origin", remoteURL)
gitRemoteAdd.Stdout = stdout
gitRemoteAdd.Stderr = stderr
err = run.PrepareCmd(gitRemoteAdd).Run()
if err != nil {
return err
}
fmt.Fprintf(stderr, "%s Initialized repository in './%s/'\n", utils.GreenCheck(), path)
}
}
return nil
}
|
{
if strings.Contains(opts.Name, "/") {
var err error
repoToCreate, err = ghrepo.FromFullName(opts.Name)
if err != nil {
return fmt.Errorf("argument error: %w", err)
}
} else {
repoToCreate = ghrepo.New("", opts.Name)
}
}
|
match-static-const-lc.rs
|
// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Issue #7526: lowercase static constants in patterns look like bindings
#![allow(dead_code)]
#![deny(non_uppercase_pattern_statics)]
pub static a : int = 97;
fn f() {
let r = match (0,0) {
(0, a) => 0,
//~^ ERROR static constant in pattern should be all caps
(x, y) => 1 + x + y,
};
assert!(r == 1);
}
mod m {
pub static aha : int = 7;
}
fn g()
|
mod n {
pub static OKAY : int = 8;
}
fn h() {
use not_okay = self::n::OKAY;
let r = match (0,0) {
(0, not_okay) => 0,
//~^ ERROR static constant in pattern should be all caps
(x, y) => 1 + x + y,
};
assert!(r == 1);
}
fn main () {
f();
g();
h();
}
|
{
use self::m::aha;
let r = match (0,0) {
(0, aha) => 0,
//~^ ERROR static constant in pattern should be all caps
(x, y) => 1 + x + y,
};
assert!(r == 1);
}
|
preprocessors.py
|
import numpy as np
import pandas as pd
from sklearn.base import BaseEstimator, TransformerMixin
from regression_model.processing.errors import InvalidModelInputError
class CategoricalImputer(BaseEstimator, TransformerMixin):
"""Categorical data missing value imputer."""
def __init__(self, variables=None) -> None:
if not isinstance(variables, list):
self.variables = [variables]
else:
self.variables = variables
<<<<<<< HEAD
def fit(self, X: pd.DataFrame, y: pd.Series = None
) -> 'CategoricalImputer':
=======
def fit(self, X: pd.DataFrame, y: pd.Series = None) -> "CategoricalImputer":
>>>>>>> upstream/master
"""Fit statement to accomodate the sklearn pipeline."""
return self
def transform(self, X: pd.DataFrame) -> pd.DataFrame:
"""Apply the transforms to the dataframe."""
X = X.copy()
for feature in self.variables:
<<<<<<< HEAD
X[feature] = X[feature].fillna('Missing')
=======
X[feature] = X[feature].fillna("Missing")
>>>>>>> upstream/master
return X
class NumericalImputer(BaseEstimator, TransformerMixin):
"""Numerical missing value imputer."""
def __init__(self, variables=None):
if not isinstance(variables, list):
self.variables = [variables]
else:
self.variables = variables
def fit(self, X, y=None):
# persist mode in a dictionary
self.imputer_dict_ = {}
for feature in self.variables:
self.imputer_dict_[feature] = X[feature].mode()[0]
return self
def transform(self, X):
X = X.copy()
for feature in self.variables:
X[feature].fillna(self.imputer_dict_[feature], inplace=True)
return X
class TemporalVariableEstimator(BaseEstimator, TransformerMixin):
"""Temporal variable calculator."""
def __init__(self, variables=None, reference_variable=None):
if not isinstance(variables, list):
self.variables = [variables]
else:
self.variables = variables
self.reference_variables = reference_variable
def fit(self, X, y=None):
# we need this step to fit the sklearn pipeline
return self
def transform(self, X):
X = X.copy()
for feature in self.variables:
X[feature] = X[self.reference_variables] - X[feature]
return X
class RareLabelCategoricalEncoder(BaseEstimator, TransformerMixin):
"""Rare label categorical encoder"""
def __init__(self, tol=0.05, variables=None):
self.tol = tol
if not isinstance(variables, list):
self.variables = [variables]
else:
self.variables = variables
def fit(self, X, y=None):
# persist frequent labels in dictionary
self.encoder_dict_ = {}
for var in self.variables:
# the encoder will learn the most frequent categories
t = pd.Series(X[var].value_counts() / np.float(len(X)))
# frequent labels:
self.encoder_dict_[var] = list(t[t >= self.tol].index)
return self
def
|
(self, X):
X = X.copy()
for feature in self.variables:
<<<<<<< HEAD
X[feature] = np.where(X[feature].isin(
self.encoder_dict_[feature]), X[feature], 'Rare')
=======
X[feature] = np.where(
X[feature].isin(self.encoder_dict_[feature]), X[feature], "Rare"
)
>>>>>>> upstream/master
return X
class CategoricalEncoder(BaseEstimator, TransformerMixin):
"""String to numbers categorical encoder."""
def __init__(self, variables=None):
if not isinstance(variables, list):
self.variables = [variables]
else:
self.variables = variables
def fit(self, X, y):
temp = pd.concat([X, y], axis=1)
<<<<<<< HEAD
temp.columns = list(X.columns) + ['target']
=======
temp.columns = list(X.columns) + ["target"]
>>>>>>> upstream/master
# persist transforming dictionary
self.encoder_dict_ = {}
for var in self.variables:
<<<<<<< HEAD
t = temp.groupby([var])['target'].mean().sort_values(
ascending=True).index
=======
t = temp.groupby([var])["target"].mean().sort_values(ascending=True).index
>>>>>>> upstream/master
self.encoder_dict_[var] = {k: i for i, k in enumerate(t, 0)}
return self
def transform(self, X):
# encode labels
X = X.copy()
for feature in self.variables:
X[feature] = X[feature].map(self.encoder_dict_[feature])
# check if transformer introduces NaN
if X[self.variables].isnull().any().any():
null_counts = X[self.variables].isnull().any()
<<<<<<< HEAD
vars_ = {key: value for (key, value) in null_counts.items()
if value is True}
raise InvalidModelInputError(
f'Categorical encoder has introduced NaN when '
f'transforming categorical variables: {vars_.keys()}')
=======
vars_ = {
key: value for (key, value) in null_counts.items() if value is True
}
raise InvalidModelInputError(
f"Categorical encoder has introduced NaN when "
f"transforming categorical variables: {vars_.keys()}"
)
>>>>>>> upstream/master
return X
class DropUnecessaryFeatures(BaseEstimator, TransformerMixin):
<<<<<<< HEAD
=======
>>>>>>> upstream/master
def __init__(self, variables_to_drop=None):
self.variables = variables_to_drop
def fit(self, X, y=None):
return self
def transform(self, X):
# encode labels
X = X.copy()
X = X.drop(self.variables, axis=1)
return X
|
transform
|
hot-tags.js
|
* @author errorrik([email protected])
*/
var splitStr2Obj = require('../util/split-str-2-obj');
/**
* 自闭合标签列表
*
* @type {Object}
*/
var hotTags = splitStr2Obj('div,span,input,button,textarea,form,label,dl,dt,dd,ul,ol,li,a,b,u,h1,h2,h3,h4,h5,h6');
exports = module.exports = hotTags;
|
/**
* @file 自闭合标签表
|
|
main.py
|
import threading
import Xlib
from Xlib.display import Display
from Xlib import X, XK
from Xlib.protocol import event
from normal import normal_mode
class Manager():
def __init__(self, inkscape_id):
self.id = inkscape_id
self.disp = Display()
self.screen = self.disp.screen()
self.root = self.screen.root
self.inkscape = self.disp.create_resource_object('window', inkscape_id)
self.mode = normal_mode
def event(self, name, detail, state):
return name(
time=X.CurrentTime,
root=self.root,
window=self.inkscape,
same_screen=0, child=Xlib.X.NONE,
root_x=0, root_y=0, event_x=0, event_y=0,
state=state,
detail=detail
)
def string_to_keycode(self, key):
keysym = XK.string_to_keysym(key)
keycode = self.disp.keysym_to_keycode(keysym)
return keycode
def press(self, key, mask=X.NONE):
|
def grab(self):
self.inkscape.grab_key(X.AnyKey, X.AnyModifier, True, X.GrabModeAsync, X.GrabModeAsync)
# Ungrab window manager shortcuts (Super + ...)
self.inkscape.ungrab_key(self.string_to_keycode('Super_L'), X.AnyModifier, True)
self.inkscape.change_attributes(event_mask=X.KeyReleaseMask | X.KeyPressMask | X.StructureNotifyMask)
def ungrab(self):
self.inkscape.ungrab_key(X.AnyKey, X.AnyModifier, True)
def listen(self):
self.grab()
while True:
evt = self.disp.next_event()
if evt.type in [X.KeyPress, X.KeyRelease]:
keycode = evt.detail
keysym = self.disp.keycode_to_keysym(keycode, 0)
char = XK.keysym_to_string(keysym)
self.disp.allow_events(X.ReplayKeyboard, X.CurrentTime)
self.mode(self, evt, char)
if evt.type == X.DestroyNotify:
if evt.window.id == self.id:
self.ungrab()
return
def create(inkscape_id):
m = Manager(inkscape_id)
m.listen()
def main():
disp = Display()
screen = disp.screen()
root = screen.root
root.change_attributes(event_mask=X.SubstructureNotifyMask)
while True:
evt = disp.next_event()
if evt.type == X.CreateNotify:
window = evt.window
try:
if window.get_wm_class() and window.get_wm_class()[0] == 'inkscape':
print('Listening!')
listen = threading.Thread(target=create, args=[window.id])
listen.start()
except Xlib.error.BadWindow:
pass
if __name__ == '__main__':
main()
|
keycode = self.string_to_keycode(key)
self.inkscape.send_event(self.event(event.KeyPress, keycode, mask), propagate=True)
self.inkscape.send_event(self.event(event.KeyRelease, keycode, mask), propagate=True)
self.disp.flush()
self.disp.sync()
|
json-array-member.ts
|
import {isReflectMetadataSupported, logError, MISSING_REFLECT_CONF_MSG, nameof} from './helpers';
import {
CustomDeserializerParams,
CustomSerializerParams,
injectMetadataInformation,
} from './metadata';
import {extractOptionBase, OptionsBase} from './options-base';
import {
ArrayTypeDescriptor,
ensureTypeDescriptor,
ensureTypeThunk,
MaybeTypeThunk,
TypeDescriptor,
TypeThunk,
} from './type-descriptor';
declare abstract class Reflect {
static getMetadata(metadataKey: string, target: any, targetKey: string | symbol): any;
}
export interface IJsonArrayMemberOptions extends OptionsBase {
/** When set, indicates that the member must be present when deserializing. */
isRequired?: boolean | null;
/** When set, an empty array is emitted if the property is undefined/uninitialized. */
emitDefaultValue?: boolean | null;
/** Sets array dimensions (e.g. 1 for 'number[]' or 2 for 'number[][]'). Defaults to 1. */
dimensions?: number | null;
/** When set, the key on the JSON that should be used instead of the class property name */
name?: string | null;
/**
* When set, this deserializer will be used to deserialize the member. The callee must assure
* the correct type.
*/
deserializer?: ((json: any, params: CustomDeserializerParams) => any) | null;
/** When set, this serializer will be used to serialize the member. */
serializer?: ((value: any, params: CustomSerializerParams) => any) | null;
}
/**
* Specifies that a property, of type array, is part of an object when serializing.
* @param maybeTypeThunk Constructor of array elements (e.g. 'Number' for 'number[]', or 'Date'
* for 'Date[]').
* @param options Additional options.
*/
export function jsonArrayMember(
maybeTypeThunk: MaybeTypeThunk,
options: IJsonArrayMemberOptions = {},
) {
return (target: Object, propKey: string | symbol) => {
const decoratorName =
`@jsonArrayMember on ${nameof(target.constructor)}.${String(propKey)}`;
const typeThunk: TypeThunk = ensureTypeThunk(maybeTypeThunk, decoratorName);
const dimensions = options.dimensions == null ? 1 : options.dimensions;
if (!isNaN(dimensions) && dimensions < 1) {
logError(`${decoratorName}: 'dimensions' option must be at least 1.`);
return;
}
// If ReflectDecorators is available, use it to check whether 'jsonArrayMember' has been
// used on an array.
const reflectedType = isReflectMetadataSupported
? Reflect.getMetadata('design:type', target, propKey)
|
return;
}
injectMetadataInformation(target, propKey, {
type: () => createArrayType(ensureTypeDescriptor(typeThunk()), dimensions),
emitDefaultValue: options.emitDefaultValue,
isRequired: options.isRequired,
options: extractOptionBase(options),
key: propKey.toString(),
name: options.name ?? propKey.toString(),
deserializer: options.deserializer,
serializer: options.serializer,
});
};
}
export function createArrayType(
elementType: TypeDescriptor,
dimensions: number,
): ArrayTypeDescriptor {
let type = new ArrayTypeDescriptor(elementType);
for (let i = 1; i < dimensions; ++i) {
type = new ArrayTypeDescriptor(type);
}
return type;
}
|
: null;
if (reflectedType != null && reflectedType !== Array && reflectedType !== Object) {
logError(`${decoratorName}: property is not an Array. ${MISSING_REFLECT_CONF_MSG}`);
|
command.ts
|
import { Command } from 'packages/common/types/util/domain';
import { ApiV1Request } from './types';
export enum ApiV1CommandTypes {
SEND_API_V1_REQUEST = 'SEND_API_V1_REQUEST-47406dac-1dc9-4831-a20a-ac917a944ddb',
}
export type SendApiV1Request = Command<ApiV1CommandTypes.SEND_API_V1_REQUEST, { request: ApiV1Request }>;
export function
|
(request: ApiV1Request): SendApiV1Request {
return {
type: ApiV1CommandTypes.SEND_API_V1_REQUEST,
payload: { request },
};
}
|
createSendApiV1Request
|
get-transaction.test.ts
|
describe('getTransaction', () => {
for (const fixture of fixtures) {
test(fixture.testName, async () => {
const mock1 = sinon.stub(blockfrostAPI, 'txs').resolves(fixture.txs);
const result = await getTransaction(
1,
'28172ea876c3d1e691284e5179fae2feb3e69d7d41e43f8023dc380115741026',
);
expect(result).toBe(JSON.stringify(fixture.result));
mock1.restore();
});
}
});
|
import sinon from 'sinon';
import fixtures from '../../fixtures/getTransaction';
import { blockfrostAPI } from '../../../../src/utils/blockfrost-api';
import getTransaction from '../../../../src/methods/get-transaction';
|
|
filter_by_current_user_with_on_request_builder.go
|
package filterbycurrentuserwithon
import (
i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f "github.com/microsoft/kiota-abstractions-go"
)
// FilterByCurrentUserWithOnRequestBuilder provides operations to call the filterByCurrentUser method.
type FilterByCurrentUserWithOnRequestBuilder struct {
// Path parameters for the request
pathParameters map[string]string
// The request adapter to use to execute the requests.
requestAdapter i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestAdapter
// Url template to use to build the URL for the current request builder
urlTemplate string
}
// FilterByCurrentUserWithOnRequestBuilderGetRequestConfiguration configuration for the request such as headers, query parameters, and middleware options.
type FilterByCurrentUserWithOnRequestBuilderGetRequestConfiguration struct {
// Request headers
Headers map[string]string
// Request options
Options []i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestOption
}
// NewFilterByCurrentUserWithOnRequestBuilderInternal instantiates a new FilterByCurrentUserWithOnRequestBuilder and sets the default values.
func NewFilterByCurrentUserWithOnRequestBuilderInternal(pathParameters map[string]string, requestAdapter i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestAdapter, on *string)(*FilterByCurrentUserWithOnRequestBuilder) {
m := &FilterByCurrentUserWithOnRequestBuilder{
}
m.urlTemplate = "{+baseurl}/me/appConsentRequestsForApproval/microsoft.graph.filterByCurrentUser(on='{on}')";
urlTplParams := make(map[string]string)
for idx, item := range pathParameters {
urlTplParams[idx] = item
}
if on != nil {
urlTplParams[""] = *on
}
m.pathParameters = urlTplParams;
m.requestAdapter = requestAdapter;
return m
}
// NewFilterByCurrentUserWithOnRequestBuilder instantiates a new FilterByCurrentUserWithOnRequestBuilder and sets the default values.
func NewFilterByCurrentUserWithOnRequestBuilder(rawUrl string, requestAdapter i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestAdapter)(*FilterByCurrentUserWithOnRequestBuilder) {
urlParams := make(map[string]string)
urlParams["request-raw-url"] = rawUrl
return NewFilterByCurrentUserWithOnRequestBuilderInternal(urlParams, requestAdapter, nil)
}
// CreateGetRequestInformation invoke function filterByCurrentUser
func (m *FilterByCurrentUserWithOnRequestBuilder) CreateGetRequestInformation()(*i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestInformation, error) {
return m.CreateGetRequestInformationWithRequestConfiguration(nil);
}
// CreateGetRequestInformationWithRequestConfiguration invoke function filterByCurrentUser
func (m *FilterByCurrentUserWithOnRequestBuilder) CreateGetRequestInformationWithRequestConfiguration(requestConfiguration *FilterByCurrentUserWithOnRequestBuilderGetRequestConfiguration)(*i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestInformation, error) {
requestInfo := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.NewRequestInformation()
requestInfo.UrlTemplate = m.urlTemplate
|
if requestConfiguration != nil {
requestInfo.AddRequestHeaders(requestConfiguration.Headers)
requestInfo.AddRequestOptions(requestConfiguration.Options)
}
return requestInfo, nil
}
// Get invoke function filterByCurrentUser
func (m *FilterByCurrentUserWithOnRequestBuilder) Get()(FilterByCurrentUserWithOnResponseable, error) {
return m.GetWithRequestConfigurationAndResponseHandler(nil, nil);
}
// GetWithRequestConfigurationAndResponseHandler invoke function filterByCurrentUser
func (m *FilterByCurrentUserWithOnRequestBuilder) GetWithRequestConfigurationAndResponseHandler(requestConfiguration *FilterByCurrentUserWithOnRequestBuilderGetRequestConfiguration, responseHandler i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ResponseHandler)(FilterByCurrentUserWithOnResponseable, error) {
requestInfo, err := m.CreateGetRequestInformationWithRequestConfiguration(requestConfiguration);
if err != nil {
return nil, err
}
res, err := m.requestAdapter.SendAsync(requestInfo, CreateFilterByCurrentUserWithOnResponseFromDiscriminatorValue, responseHandler, nil)
if err != nil {
return nil, err
}
return res.(FilterByCurrentUserWithOnResponseable), nil
}
|
requestInfo.PathParameters = m.pathParameters
requestInfo.Method = i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.GET
|
explainer.py
|
from lime import lime_tabular, lime_image
from scipy.misc import imresize
import numpy as np
import tensorflow as tf
class TabularExplainer:
def __init__(self, dataset, verbose=True):
train_dataset, training_labels = dataset.make_numpy_array(dataset.get_train_file())
mode = dataset.get_mode()
categorical_features, categorical_index, categorical_names = dataset.get_categorical_features()
unique = dataset.get_target_labels()
self._mode = mode
self.dataset = dataset
self._explainer = lime_tabular.LimeTabularExplainer(train_dataset,
feature_names=dataset.get_feature_names(),
class_names=unique,
categorical_features=categorical_index,
categorical_names=categorical_names,
training_labels=training_labels,
verbose=verbose,
mode=self._mode)
def explain_instance(self, model, features, num_features=5, top_labels=3, sel_target=None):
sample = self.dataset.create_feat_array(features)
features = {k: features[k] for k in self.dataset.get_feature_names()}
def predict_fn(x):
x = x.reshape(-1, len(features))
local_features = {k: x[:, i] for i, k in enumerate(features.keys())}
local_features = self.dataset.from_array(local_features)
predict_input_fn = tf.estimator.inputs.numpy_input_fn(x=local_features,
y=None, num_epochs=1, shuffle=False)
with tf.device('/cpu:0'): # TODO maybe check if gpu is free
predictions = list(model.predict(input_fn=predict_input_fn))
if self._mode == 'classification':
return np.array([x['probabilities'] for x in predictions])
if sel_target:
tidx = self.dataset.get_targets().index(sel_target)
return np.array([x['predictions'][tidx] for x in predictions]).reshape(-1)
return np.array([x['predictions'] for x in predictions]).reshape(-1)
if self._mode == 'classification':
return self._explainer.explain_instance(sample, predict_fn, num_features=num_features,
top_labels=top_labels)
return self._explainer.explain_instance(sample, predict_fn, num_features=num_features)
class ImageExplainer:
def __init__(self, dataset, verbose=True):
self._dataset = dataset
self._explainer = lime_image.LimeImageExplainer(verbose=verbose)
def explain_instance(self, model, features, num_features=5, top_labels=3, sel_target=None):
|
def predict_fn(x):
x = x.astype(np.float32)
x = np.apply_along_axis(self._dataset.normalize, 0, x)
predict_input_fn = tf.estimator.inputs.numpy_input_fn(x=x, y=None, num_epochs=1, shuffle=False)
with tf.device('/cpu:0'): # TODO maybe check if gpu is free
probabilities = list(model.predict(input_fn=predict_input_fn))
return np.array([x['probabilities'] for x in probabilities])
features = imresize(features, self._dataset.get_image_size(), interp='bilinear')
explain_result = self._explainer.explain_instance(features, predict_fn, batch_size=100,
num_features=num_features,
labels=self._dataset.get_class_names(),
top_labels=len(self._dataset.get_class_names()))
features = features.astype(np.float32)
features = self._dataset.normalize(features)
predict_input_fn = tf.estimator.inputs.numpy_input_fn(x=features[np.newaxis, ...], y=None, num_epochs=1,
shuffle=False)
with tf.device('/cpu:0'): # TODO maybe check if gpu is free
predictions = list(model.predict(input_fn=predict_input_fn))
return explain_result, predictions[0]['probabilities']
|
|
client_http.go
|
package auth
import (
"context"
"io"
"net/http"
"net/url"
"strings"
"sync/atomic"
"github.com/kiteco/kiteco/kite-go/community"
"github.com/kiteco/kiteco/kite-golib/errors"
)
// Get performs an authenticated GET request with the provided path relative to the target
func (c *Client) Get(ctx context.Context, path string) (*http.Response, error) {
c.mu.RLock()
defer c.mu.RUnlock()
req, err := c.makeRequestLocked("GET", path, "", nil, true)
if err != nil {
return nil, err
}
return c.doHTTPLocked(ctx, req)
}
// getNoHMAC performs an session-only GET request with the provided path relative to the target
func (c *Client) getNoHMAC(ctx context.Context, path string) (*http.Response, error) {
c.mu.RLock()
defer c.mu.RUnlock()
return c.getNoHMACLocked(ctx, path)
}
// getNoHMACLocked performs an session-only GET request with the provided path relative to the target
func (c *Client) getNoHMACLocked(ctx context.Context, path string) (*http.Response, error) {
req, err := c.makeRequestLocked("GET", path, "", nil, false)
if err != nil {
return nil, err
}
return c.doHTTPLocked(ctx, req)
}
// Post performs an authenticated POST request with the provided path relative to the target
func (c *Client) Post(ctx context.Context, path, contentType string, body io.Reader) (*http.Response, error) {
c.mu.RLock()
defer c.mu.RUnlock()
req, err := c.makeRequestLocked("POST", path, contentType, body, true)
if err != nil {
return nil, err
}
return c.doHTTPLocked(ctx, req)
}
// postForm performs an authenticated POST request with the provided path relative to the target
func (c *Client) postForm(ctx context.Context, url string, data url.Values) (*http.Response, error) {
return c.Post(ctx, url, "application/x-www-form-urlencoded", strings.NewReader(data.Encode()))
}
// NewRequest parallels http.NewRequest, but takes in a path. The proxy will fill in the full path relative to the target.
func (c *Client) NewRequest(method, path, contentType string, body io.Reader) (*http.Request, error) {
c.mu.RLock()
defer c.mu.RUnlock()
return c.makeRequestLocked(method, path, contentType, body, true)
}
// Do performs the provided HTTP request, it adds the HMAC headers
func (c *Client) Do(ctx context.Context, req *http.Request) (*http.Response, error) {
c.mu.RLock()
defer c.mu.RUnlock()
return c.doHTTPLocked(ctx, c.addAuth(req, true))
}
// doHTTPLocked performs the HTTP request, it does not change the request's properties.
func (c *Client) doHTTPLocked(ctx context.Context, req *http.Request) (*http.Response, error) {
if c.client == nil {
return nil, errors.New("http client not set")
}
resp, err := c.client.Do(req.WithContext(ctx))
if err == nil {
c.token.UpdateFromHeader(resp.Header)
}
return c.wrap(resp), err
}
// --
// makeRequestLocked returns a pointer to a new http request object
func (c *Client) makeRequestLocked(method, path string, contentType string, body io.Reader, hmac bool) (*http.Request, error) {
if c.target == nil {
return nil, errors.Errorf("target not defined")
}
endpoint, err := c.target.Parse(path)
if err != nil {
return nil, err
}
req, err := http.NewRequest(method, endpoint.String(), body)
if err != nil {
return nil, err
}
if contentType != "" {
req.Header.Set("Content-Type", contentType)
}
return c.addAuth(req, hmac), nil
}
// addAuth should be idempotent
func (c *Client) addAuth(req *http.Request, hmac bool) *http.Request {
req.Header.Set(community.MachineHeader, c.machineID)
if hmac {
c.token.AddToHeader(req.Header)
}
return req
}
// wrap wraps a response to update the number of open connections
func (c *Client) wrap(resp *http.Response) *http.Response {
if resp == nil {
return nil
|
atomic.AddInt64(&c.openedConnections, 1)
resp.Body = &bodyWrapper{body: resp.Body, client: c}
return resp
}
// bodyWrapper is a simple wrapper to decrement to number of open connections on Close()
type bodyWrapper struct {
body io.ReadCloser
client *Client
}
func (b *bodyWrapper) Read(buf []byte) (int, error) {
return b.body.Read(buf)
}
func (b *bodyWrapper) Close() error {
atomic.AddInt64(&b.client.closedConnections, 1)
return b.body.Close()
}
|
}
|
niatelemetry_supervisor_module_details.py
|
"""
Cisco Intersight
Cisco Intersight is a management platform delivered as a service with embedded analytics for your Cisco and 3rd party IT infrastructure. This platform offers an intelligent level of management that enables IT organizations to analyze, simplify, and automate their environments in more advanced ways than the prior generations of tools. Cisco Intersight provides an integrated and intuitive management experience for resources in the traditional data center as well as at the edge. With flexible deployment options to address complex security needs, getting started with Intersight is quick and easy. Cisco Intersight has deep integration with Cisco UCS and HyperFlex systems allowing for remote deployment, configuration, and ongoing maintenance. The model-based deployment works for a single system in a remote location or hundreds of systems in a data center and enables rapid, standardized configuration and deployment. It also streamlines maintaining those systems whether you are working with small or very large configurations. The Intersight OpenAPI document defines the complete set of properties that are returned in the HTTP response. From that perspective, a client can expect that no additional properties are returned, unless these properties are explicitly defined in the OpenAPI document. However, when a client uses an older version of the Intersight OpenAPI document, the server may send additional properties because the software is more recent than the client. In that case, the client may receive properties that it does not know about. Some generated SDKs perform a strict validation of the HTTP response body against the OpenAPI document. # noqa: E501
The version of the OpenAPI document: 1.0.9-4950
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from intersight.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
def lazy_import():
from intersight.model.asset_device_registration_relationship import AssetDeviceRegistrationRelationship
from intersight.model.display_names import DisplayNames
from intersight.model.mo_base_mo import MoBaseMo
from intersight.model.mo_base_mo_relationship import MoBaseMoRelationship
from intersight.model.mo_tag import MoTag
from intersight.model.mo_version_context import MoVersionContext
from intersight.model.niatelemetry_supervisor_module_details_all_of import NiatelemetrySupervisorModuleDetailsAllOf
globals()['AssetDeviceRegistrationRelationship'] = AssetDeviceRegistrationRelationship
globals()['DisplayNames'] = DisplayNames
globals()['MoBaseMo'] = MoBaseMo
globals()['MoBaseMoRelationship'] = MoBaseMoRelationship
globals()['MoTag'] = MoTag
globals()['MoVersionContext'] = MoVersionContext
globals()['NiatelemetrySupervisorModuleDetailsAllOf'] = NiatelemetrySupervisorModuleDetailsAllOf
class NiatelemetrySupervisorModuleDetails(ModelComposed):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
('class_id',): {
'NIATELEMETRY.SUPERVISORMODULEDETAILS': "niatelemetry.SupervisorModuleDetails",
},
('object_type',): {
'NIATELEMETRY.SUPERVISORMODULEDETAILS': "niatelemetry.SupervisorModuleDetails",
},
}
validations = {
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
"""
lazy_import()
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
'class_id': (str,), # noqa: E501
'object_type': (str,), # noqa: E501
'dn': (str,), # noqa: E501
'hw_ver': (str,), # noqa: E501
'model': (str,), # noqa: E501
'record_type': (str,), # noqa: E501
'record_version': (str,), # noqa: E501
'serial': (str,), # noqa: E501
'site_name': (str,), # noqa: E501
'registered_device': (AssetDeviceRegistrationRelationship,), # noqa: E501
'account_moid': (str,), # noqa: E501
'create_time': (datetime,), # noqa: E501
'domain_group_moid': (str,), # noqa: E501
'mod_time': (datetime,), # noqa: E501
'moid': (str,), # noqa: E501
'owners': ([str], none_type,), # noqa: E501
'shared_scope': (str,), # noqa: E501
'tags': ([MoTag], none_type,), # noqa: E501
'version_context': (MoVersionContext,), # noqa: E501
'ancestors': ([MoBaseMoRelationship], none_type,), # noqa: E501
'parent': (MoBaseMoRelationship,), # noqa: E501
'permission_resources': ([MoBaseMoRelationship], none_type,), # noqa: E501
'display_names': (DisplayNames,), # noqa: E501
}
@cached_property
def discriminator():
val = {
}
if not val:
return None
return {'class_id': val}
attribute_map = {
'class_id': 'ClassId', # noqa: E501
'object_type': 'ObjectType', # noqa: E501
'dn': 'Dn', # noqa: E501
'hw_ver': 'HwVer', # noqa: E501
'model': 'Model', # noqa: E501
'record_type': 'RecordType', # noqa: E501
'record_version': 'RecordVersion', # noqa: E501
'serial': 'Serial', # noqa: E501
'site_name': 'SiteName', # noqa: E501
'registered_device': 'RegisteredDevice', # noqa: E501
'account_moid': 'AccountMoid', # noqa: E501
'create_time': 'CreateTime', # noqa: E501
'domain_group_moid': 'DomainGroupMoid', # noqa: E501
'mod_time': 'ModTime', # noqa: E501
'moid': 'Moid', # noqa: E501
'owners': 'Owners', # noqa: E501
'shared_scope': 'SharedScope', # noqa: E501
'tags': 'Tags', # noqa: E501
'version_context': 'VersionContext', # noqa: E501
'ancestors': 'Ancestors', # noqa: E501
'parent': 'Parent', # noqa: E501
'permission_resources': 'PermissionResources', # noqa: E501
'display_names': 'DisplayNames', # noqa: E501
}
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
'_composed_instances',
'_var_name_to_model_instances',
'_additional_properties_model_instances',
])
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs): # noqa: E501
"""NiatelemetrySupervisorModuleDetails - a model defined in OpenAPI
Args:
Keyword Args:
class_id (str): The fully-qualified name of the instantiated, concrete type. This property is used as a discriminator to identify the type of the payload when marshaling and unmarshaling data.. defaults to "niatelemetry.SupervisorModuleDetails", must be one of ["niatelemetry.SupervisorModuleDetails", ] # noqa: E501
object_type (str): The fully-qualified name of the instantiated, concrete type. The value should be the same as the 'ClassId' property.. defaults to "niatelemetry.SupervisorModuleDetails", must be one of ["niatelemetry.SupervisorModuleDetails", ] # noqa: E501
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
dn (str): Dn of the supervisor module in APIC.. [optional] # noqa: E501
hw_ver (str): Hardware version of supervisor module.. [optional] # noqa: E501
model (str): Model of the supervisor module.. [optional] # noqa: E501
record_type (str): Type of record DCNM / APIC / SE. This determines the type of platform where inventory was collected.. [optional] # noqa: E501
record_version (str): Version of record being pushed. This determines what was the API version for data available from the device.. [optional] # noqa: E501
serial (str): Serial number of the supervisor module.. [optional] # noqa: E501
site_name (str): Name of the APIC site from which this data is being collected.. [optional] # noqa: E501
registered_device (AssetDeviceRegistrationRelationship): [optional] # noqa: E501
account_moid (str): The Account ID for this managed object.. [optional] # noqa: E501
create_time (datetime): The time when this managed object was created.. [optional] # noqa: E501
domain_group_moid (str): The DomainGroup ID for this managed object.. [optional] # noqa: E501
mod_time (datetime): The time when this managed object was last modified.. [optional] # noqa: E501
moid (str): The unique identifier of this Managed Object instance.. [optional] # noqa: E501
owners ([str], none_type): [optional] # noqa: E501
shared_scope (str): Intersight provides pre-built workflows, tasks and policies to end users through global catalogs. Objects that are made available through global catalogs are said to have a 'shared' ownership. Shared objects are either made globally available to all end users or restricted to end users based on their license entitlement. Users can use this property to differentiate the scope (global or a specific license tier) to which a shared MO belongs.. [optional] # noqa: E501
tags ([MoTag], none_type): [optional] # noqa: E501
version_context (MoVersionContext): [optional] # noqa: E501
ancestors ([MoBaseMoRelationship], none_type): An array of relationships to moBaseMo resources.. [optional] # noqa: E501
parent (MoBaseMoRelationship): [optional] # noqa: E501
permission_resources ([MoBaseMoRelationship], none_type): An array of relationships to moBaseMo resources.. [optional] # noqa: E501
display_names (DisplayNames): [optional] # noqa: E501
"""
class_id = kwargs.get('class_id', "niatelemetry.SupervisorModuleDetails")
object_type = kwargs.get('object_type', "niatelemetry.SupervisorModuleDetails")
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
constant_args = {
'_check_type': _check_type,
'_path_to_item': _path_to_item,
'_spec_property_naming': _spec_property_naming,
'_configuration': _configuration,
'_visited_composed_classes': self._visited_composed_classes,
}
required_args = {
'class_id': class_id,
'object_type': object_type,
}
model_args = {}
model_args.update(required_args)
model_args.update(kwargs)
composed_info = validate_get_composed_info(
constant_args, model_args, self)
self._composed_instances = composed_info[0]
self._var_name_to_model_instances = composed_info[1]
self._additional_properties_model_instances = composed_info[2]
unused_args = composed_info[3]
for var_name, var_value in required_args.items():
setattr(self, var_name, var_value)
for var_name, var_value in kwargs.items():
if var_name in unused_args and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
not self._additional_properties_model_instances:
# discard variable.
continue
setattr(self, var_name, var_value)
@cached_property
def
|
():
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error beause the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
lazy_import()
return {
'anyOf': [
],
'allOf': [
MoBaseMo,
NiatelemetrySupervisorModuleDetailsAllOf,
],
'oneOf': [
],
}
|
_composed_schemas
|
privateEndpointConnection.go
|
// *** WARNING: this file was generated by the Pulumi SDK Generator. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***
package v20210601preview
import (
"context"
"reflect"
"github.com/pkg/errors"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
type PrivateEndpointConnection struct {
pulumi.CustomResourceState
Etag pulumi.StringPtrOutput `pulumi:"etag"`
Location pulumi.StringOutput `pulumi:"location"`
Name pulumi.StringOutput `pulumi:"name"`
PrivateEndpoint PrivateEndpointResponsePtrOutput `pulumi:"privateEndpoint"`
PrivateLinkServiceConnectionState PrivateLinkServiceConnectionStateResponsePtrOutput `pulumi:"privateLinkServiceConnectionState"`
ProvisioningState pulumi.StringOutput `pulumi:"provisioningState"`
Tags pulumi.StringMapOutput `pulumi:"tags"`
Type pulumi.StringOutput `pulumi:"type"`
}
// NewPrivateEndpointConnection registers a new resource with the given unique name, arguments, and options.
func NewPrivateEndpointConnection(ctx *pulumi.Context,
name string, args *PrivateEndpointConnectionArgs, opts ...pulumi.ResourceOption) (*PrivateEndpointConnection, error) {
if args == nil {
return nil, errors.New("missing one or more required arguments")
}
if args.ResourceGroupName == nil {
return nil, errors.New("invalid value for required argument 'ResourceGroupName'")
}
if args.VaultName == nil {
return nil, errors.New("invalid value for required argument 'VaultName'")
}
aliases := pulumi.Aliases([]pulumi.Alias{
{
Type: pulumi.String("azure-nextgen:keyvault/v20210601preview:PrivateEndpointConnection"),
},
{
Type: pulumi.String("azure-native:keyvault:PrivateEndpointConnection"),
},
{
Type: pulumi.String("azure-nextgen:keyvault:PrivateEndpointConnection"),
},
{
Type: pulumi.String("azure-native:keyvault/v20180214:PrivateEndpointConnection"),
},
{
Type: pulumi.String("azure-nextgen:keyvault/v20180214:PrivateEndpointConnection"),
},
{
Type: pulumi.String("azure-native:keyvault/v20190901:PrivateEndpointConnection"),
},
{
Type: pulumi.String("azure-nextgen:keyvault/v20190901:PrivateEndpointConnection"),
},
{
Type: pulumi.String("azure-native:keyvault/v20200401preview:PrivateEndpointConnection"),
},
{
Type: pulumi.String("azure-nextgen:keyvault/v20200401preview:PrivateEndpointConnection"),
},
{
Type: pulumi.String("azure-native:keyvault/v20210401preview:PrivateEndpointConnection"),
},
{
Type: pulumi.String("azure-nextgen:keyvault/v20210401preview:PrivateEndpointConnection"),
},
})
opts = append(opts, aliases)
var resource PrivateEndpointConnection
err := ctx.RegisterResource("azure-native:keyvault/v20210601preview:PrivateEndpointConnection", name, args, &resource, opts...)
if err != nil {
return nil, err
}
return &resource, nil
}
// GetPrivateEndpointConnection gets an existing PrivateEndpointConnection resource's state with the given name, ID, and optional
// state properties that are used to uniquely qualify the lookup (nil if not required).
func GetPrivateEndpointConnection(ctx *pulumi.Context,
name string, id pulumi.IDInput, state *PrivateEndpointConnectionState, opts ...pulumi.ResourceOption) (*PrivateEndpointConnection, error) {
var resource PrivateEndpointConnection
err := ctx.ReadResource("azure-native:keyvault/v20210601preview:PrivateEndpointConnection", name, id, state, &resource, opts...)
if err != nil {
return nil, err
}
return &resource, nil
}
// Input properties used for looking up and filtering PrivateEndpointConnection resources.
type privateEndpointConnectionState struct {
}
type PrivateEndpointConnectionState struct {
}
func (PrivateEndpointConnectionState) ElementType() reflect.Type {
return reflect.TypeOf((*privateEndpointConnectionState)(nil)).Elem()
}
type privateEndpointConnectionArgs struct {
Etag *string `pulumi:"etag"`
PrivateEndpointConnectionName *string `pulumi:"privateEndpointConnectionName"`
PrivateLinkServiceConnectionState *PrivateLinkServiceConnectionState `pulumi:"privateLinkServiceConnectionState"`
ResourceGroupName string `pulumi:"resourceGroupName"`
VaultName string `pulumi:"vaultName"`
}
// The set of arguments for constructing a PrivateEndpointConnection resource.
type PrivateEndpointConnectionArgs struct {
Etag pulumi.StringPtrInput
PrivateEndpointConnectionName pulumi.StringPtrInput
PrivateLinkServiceConnectionState PrivateLinkServiceConnectionStatePtrInput
ResourceGroupName pulumi.StringInput
VaultName pulumi.StringInput
}
func (PrivateEndpointConnectionArgs) ElementType() reflect.Type {
return reflect.TypeOf((*privateEndpointConnectionArgs)(nil)).Elem()
}
type PrivateEndpointConnectionInput interface {
pulumi.Input
ToPrivateEndpointConnectionOutput() PrivateEndpointConnectionOutput
ToPrivateEndpointConnectionOutputWithContext(ctx context.Context) PrivateEndpointConnectionOutput
}
func (*PrivateEndpointConnection) ElementType() reflect.Type {
return reflect.TypeOf((*PrivateEndpointConnection)(nil))
}
func (i *PrivateEndpointConnection) ToPrivateEndpointConnectionOutput() PrivateEndpointConnectionOutput {
return i.ToPrivateEndpointConnectionOutputWithContext(context.Background())
}
func (i *PrivateEndpointConnection) ToPrivateEndpointConnectionOutputWithContext(ctx context.Context) PrivateEndpointConnectionOutput {
return pulumi.ToOutputWithContext(ctx, i).(PrivateEndpointConnectionOutput)
}
type PrivateEndpointConnectionOutput struct{ *pulumi.OutputState }
func (PrivateEndpointConnectionOutput) ElementType() reflect.Type {
return reflect.TypeOf((*PrivateEndpointConnection)(nil))
}
func (o PrivateEndpointConnectionOutput) ToPrivateEndpointConnectionOutput() PrivateEndpointConnectionOutput {
return o
}
func (o PrivateEndpointConnectionOutput) ToPrivateEndpointConnectionOutputWithContext(ctx context.Context) PrivateEndpointConnectionOutput {
return o
}
func
|
() {
pulumi.RegisterOutputType(PrivateEndpointConnectionOutput{})
}
|
init
|
project_group_info.py
|
import functools
from spaceone.api.core.v1 import tag_pb2
from spaceone.api.identity.v1 import project_group_pb2
from spaceone.core.pygrpc.message_type import *
from spaceone.identity.model.project_model import Project
from spaceone.identity.model.project_group_model import ProjectGroup
from spaceone.identity.info.role_info import RoleInfo
__all__ = ['ProjectGroupInfo', 'ProjectGroupsInfo', 'ProjectGroupRoleBindingInfo', 'ProjectGroupRoleBindingsInfo',
'ProjectGroupProjectsInfo']
def ProjectGroupInfo(project_group_vo: ProjectGroup, minimal=False):
info = {
'project_group_id': project_group_vo.project_group_id,
'name': project_group_vo.name
}
if not minimal:
if project_group_vo.parent_project_group:
info.update({
'parent_project_group_info': ProjectGroupInfo(project_group_vo.parent_project_group, minimal=True)
})
info.update({
'tags': [tag_pb2.Tag(key=tag.key, value=tag.value) for tag in project_group_vo.tags],
'domain_id': project_group_vo.domain_id,
'created_by': project_group_vo.created_by,
'created_at': change_timestamp_type(project_group_vo.created_at)
})
# Temporary code for DB migration
if not project_group_vo.parent_project_group_id and project_group_vo.parent_project_group:
project_group_vo.update({'parent_project_group_id': project_group_vo.parent_project_group.project_group_id})
return project_group_pb2.ProjectGroupInfo(**info)
def ProjectGroupsInfo(project_group_vos, total_count, **kwargs):
results = list(map(functools.partial(ProjectGroupInfo, **kwargs), project_group_vos))
return project_group_pb2.ProjectGroupsInfo(results=results, total_count=total_count)
def ProjectGroupRoleBindingInfo(role_binding_vo):
info = {
'role_binding_id': role_binding_vo.role_binding_id,
'resource_type': role_binding_vo.resource_type,
'resource_id': role_binding_vo.resource_id,
'role_info': RoleInfo(role_binding_vo.role, minimal=True),
'project_group_info': ProjectGroupInfo(role_binding_vo.project_group, minimal=True),
'labels': role_binding_vo.labels,
'tags': [tag_pb2.Tag(key=tag.key, value=tag.value) for tag in role_binding_vo.tags],
'domain_id': role_binding_vo.domain_id,
'created_at': change_timestamp_type(role_binding_vo.created_at)
}
return project_group_pb2.ProjectGroupRoleBindingInfo(**info)
def ProjectGroupRoleBindingsInfo(role_binding_vos, total_count, **kwargs):
results = list(map(ProjectGroupRoleBindingInfo, role_binding_vos))
return project_group_pb2.ProjectGroupRoleBindingsInfo(results=results, total_count=total_count)
def ProjectGroupProjectInfo(project_vo: Project, minimal=False):
info = {
'project_id': project_vo.project_id,
'name': project_vo.name
}
if not minimal:
if project_vo.project_group:
info.update({
'project_group_info': ProjectGroupInfo(project_vo.project_group, minimal=True)
})
info.update({
'tags': [tag_pb2.Tag(key=tag.key, value=tag.value) for tag in project_vo.tags],
'domain_id': project_vo.domain_id,
'created_by': project_vo.created_by,
'created_at': change_timestamp_type(project_vo.created_at)
})
return project_group_pb2.ProjectGroupProjectInfo(**info)
def ProjectGroupProjectsInfo(project_vos, total_count, **kwargs):
|
results = list(map(functools.partial(ProjectGroupProjectInfo, **kwargs), project_vos))
return project_group_pb2.ProjectGroupProjectsInfo(results=results, total_count=total_count)
|
|
mod.rs
|
//routing table is here
extern crate futures;
extern crate hyper;
use self::futures::future;
use self::hyper::rt::{Future};
use self::hyper::service::service_fn;
use self::hyper::{Body, Method, Request, Response, Server, StatusCode};
/// We need to return different futures depending on the route matched,
/// and we can do that with an enum, such as `futures::Either`, or with
/// trait objects.
///
/// A boxed Future (trait object) is used as it is easier to understand
/// and extend with more types. Advanced users could switch to `Either`.
type BoxFut = Box<Future<Item = Response<Body>, Error = hyper::Error> + Send>;
/// This is our service handler. It receives a Request, routes on its
/// path, and returns a Future of a Response.
fn echo(req: Request<Body>) -> BoxFut {
let mut response = Response::new(Body::empty());
match (req.method(), req.uri().path()) {
// Serve some instructions at /
(&Method::GET, "/") => {
*response.body_mut() = Body::from("Try see currend address(es) /address(es)");
}
(&Method::GET, "/addresses") => {
// *response.body_mut() = Body::from(get_addresses());
*response.body_mut() = Body::from(super::sia::wallet::get_addresses_2());
}
(&Method::GET, "/address") => {
// *response.body_mut() = Body::from(get_addresses());
*response.body_mut() = Body::from(super::sia::wallet::get_first_address());
}
// The 404 Not Found route...
_ => {
*response.status_mut() = StatusCode::NOT_FOUND;
}
};
Box::new(future::ok(response))
}
pub fn
|
() {
let addr = ([127, 0, 0, 1], 3000).into();
let server = Server::bind(&addr)
.serve(|| service_fn(echo))
.map_err(|e| eprintln!("server error: {}", e));
println!("Listening on http://{}", addr);
hyper::rt::run(server);
}
|
run_server
|
one_hot_encoder_transformer.py
|
import pandas as pd
from sklearn.base import BaseEstimator, TransformerMixin
class OneHotEncoderTransformer(BaseEstimator, TransformerMixin):
def __init__(self, columns) -> None:
self.columns = columns
def fit(self, X, y=None):
return self
def
|
(self, X, y=None):
X = pd.get_dummies(X, columns=self.columns)
return X
|
transform
|
common.go
|
// Copyright 2020, OpenTelemetry Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package splunk // import "github.com/rati3l/opentelemetry-collector-contrib/internal/splunk"
import (
"encoding/json"
"strconv"
"strings"
)
// Constants for Splunk components.
const (
SFxAccessTokenHeader = "X-Sf-Token" // #nosec
SFxAccessTokenLabel = "com.splunk.signalfx.access_token" // #nosec
SFxEventCategoryKey = "com.splunk.signalfx.event_category"
SFxEventPropertiesKey = "com.splunk.signalfx.event_properties"
DefaultSourceTypeLabel = "com.splunk.sourcetype"
DefaultSourceLabel = "com.splunk.source"
DefaultIndexLabel = "com.splunk.index"
DefaultNameLabel = "otel.log.name"
DefaultSeverityTextLabel = "otel.log.severity.text"
DefaultSeverityNumberLabel = "otel.log.severity.number"
HECTokenHeader = "Splunk"
HecTokenLabel = "com.splunk.hec.access_token" // #nosec
// HecEventMetricType is the type of HEC event. Set to metric, as per https://docs.splunk.com/Documentation/Splunk/8.0.3/Metrics/GetMetricsInOther.
HecEventMetricType = "metric"
DefaultRawPath = "/services/collector/raw"
)
// AccessTokenPassthroughConfig configures passing through access tokens.
type AccessTokenPassthroughConfig struct {
// AccessTokenPassthrough indicates whether to associate datapoints with an organization access token received in request.
AccessTokenPassthrough bool `mapstructure:"access_token_passthrough"`
}
// Event represents a metric in Splunk HEC format
type Event struct {
Time *float64 `json:"time,omitempty"` // optional epoch time - set to nil if the event timestamp is missing or unknown
Host string `json:"host"` // hostname
Source string `json:"source,omitempty"` // optional description of the source of the event; typically the app's name
SourceType string `json:"sourcetype,omitempty"` // optional name of a Splunk parsing configuration; this is usually inferred by Splunk
Index string `json:"index,omitempty"` // optional name of the Splunk index to store the event in; not required if the token has a default index set in Splunk
Event interface{} `json:"event"` // type of event: set to "metric" or nil if the event represents a metric, or is the payload of the event.
Fields map[string]interface{} `json:"fields,omitempty"` // dimensions and metric data
}
// IsMetric returns true if the Splunk event is a metric.
func (e Event) IsMetric() bool {
return e.Event == HecEventMetricType || (e.Event == nil && len(e.GetMetricValues()) > 0)
}
// GetMetricValues extracts metric key value pairs from a Splunk HEC metric.
func (e Event) GetMetricValues() map[string]interface{} {
values := map[string]interface{}{}
for k, v := range e.Fields {
if strings.HasPrefix(k, "metric_name:") {
values[k[12:]] = v
}
}
return values
}
// UnmarshalJSON unmarshals the JSON representation of an event
func (e *Event) UnmarshalJSON(b []byte) error {
rawEvent := struct {
Time interface{} `json:"time,omitempty"`
Host string `json:"host"`
Source string `json:"source,omitempty"`
SourceType string `json:"sourcetype,omitempty"`
Index string `json:"index,omitempty"`
Event interface{} `json:"event"`
Fields map[string]interface{} `json:"fields,omitempty"`
}{}
err := json.Unmarshal(b, &rawEvent)
if err != nil
|
*e = Event{
Host: rawEvent.Host,
Source: rawEvent.Source,
SourceType: rawEvent.SourceType,
Index: rawEvent.Index,
Event: rawEvent.Event,
Fields: rawEvent.Fields,
}
switch t := rawEvent.Time.(type) {
case float64:
e.Time = &t
case string:
{
time, err := strconv.ParseFloat(t, 64)
if err != nil {
return err
}
e.Time = &time
}
}
return nil
}
// HecToOtelAttrs defines the mapping of Splunk HEC metadata to attributes
type HecToOtelAttrs struct {
// Source indicates the mapping of the source field to a specific unified model attribute.
Source string `mapstructure:"source"`
// SourceType indicates the mapping of the sourcetype field to a specific unified model attribute.
SourceType string `mapstructure:"sourcetype"`
// Index indicates the mapping of the index field to a specific unified model attribute.
Index string `mapstructure:"index"`
// Host indicates the mapping of the host field to a specific unified model attribute.
Host string `mapstructure:"host"`
}
|
{
return err
}
|
data.go
|
// Licensed to Elasticsearch B.V. under one or more contributor
// license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright
// ownership. Elasticsearch B.V. licenses this file to you under
// the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package connection
import (
"encoding/json"
"github.com/pkg/errors"
"github.com/elastic/beats/libbeat/common"
s "github.com/elastic/beats/libbeat/common/schema"
c "github.com/elastic/beats/libbeat/common/schema/mapstriface"
"github.com/elastic/beats/metricbeat/mb"
)
var (
schema = s.Schema{
"name": c.Str("name"),
"vhost": c.Str("vhost", s.Required),
"user": c.Str("user", s.Required),
"node": c.Str("node", s.Required),
"channels": c.Int("channels"),
"channel_max": c.Int("channel_max"),
"frame_max": c.Int("frame_max"),
"type": c.Str("type"),
"packet_count": s.Object{
"sent": c.Int("send_cnt"),
"received": c.Int("recv_cnt"),
"pending": c.Int("send_pend"),
},
"octet_count": s.Object{
"sent": c.Int("send_oct"),
"received": c.Int("recv_oct"),
},
"host": c.Str("host"),
"port": c.Int("port"),
"peer": s.Object{
"host": c.Str("peer_host"),
"port": c.Int("peer_port"),
},
}
)
func
|
(content []byte, r mb.ReporterV2, m *MetricSet) error {
var connections []map[string]interface{}
err := json.Unmarshal(content, &connections)
if err != nil {
return errors.Wrap(err, "error in unmarshal")
}
for _, node := range connections {
evt, err := eventMapping(node)
if err != nil {
m.Logger().Errorf("error in mapping: %s", err)
r.Error(err)
continue
}
if !r.Event(evt) {
return nil
}
}
return nil
}
func eventMapping(connection map[string]interface{}) (mb.Event, error) {
fields, err := schema.Apply(connection, s.FailOnRequired)
if err != nil {
return mb.Event{}, errors.Wrap(err, "error applying schema")
}
rootFields := common.MapStr{}
if v, err := fields.GetValue("user"); err == nil {
rootFields.Put("user.name", v)
fields.Delete("user")
}
moduleFields := common.MapStr{}
if v, err := fields.GetValue("vhost"); err == nil {
moduleFields.Put("vhost", v)
fields.Delete("vhost")
}
if v, err := fields.GetValue("node"); err == nil {
moduleFields.Put("node.name", v)
fields.Delete("node")
}
event := mb.Event{
MetricSetFields: fields,
RootFields: rootFields,
ModuleFields: moduleFields,
}
return event, nil
}
|
eventsMapping
|
config.rs
|
/** For now, dual-purposing editor and player config */
use once_cell::sync::OnceCell;
use serde::Deserialize;
use shared::domain::jig::module::body::_groups::cards::Mode;
use utils::prelude::*;
pub const MAX_LIST_WORDS: usize = 14;
pub static DUAL_LIST_CHAR_LIMIT: usize = 30;
pub static SINGLE_LIST_CHAR_LIMIT: usize = 30;
pub static CARD_TEXT_LIMIT_WIDTH: f64 = 150.0;
pub static CARD_TEXT_LIMIT_HEIGHT: f64 = 150.0;
macro_rules! config_path {
($e:tt) => {
concat!("../../../../../../../../config/", $e)
};
}
static EDITOR_CONFIG: OnceCell<EditorConfig> = OnceCell::new();
#[derive(Debug, Deserialize)]
struct EditorConfig {
init: InitConfig,
}
#[derive(Debug, Deserialize)]
struct InitConfig {
single_list_words: Vec<String>,
dual_list_words: Vec<(String, String)>,
}
pub fn init() {
let _ = EDITOR_CONFIG.set(
serde_json::from_str(include_str!(config_path!(
"module/_groups/cards/editor.json"
)))
.unwrap_ji(),
);
}
pub fn get_single_list_init_word(index: usize) -> Option<String>
|
pub fn get_dual_list_init_word(row: usize, col: usize) -> Option<String> {
EDITOR_CONFIG
.get()
.and_then(|config| {
config.init.dual_list_words.get(row).map(|words| {
if col == 0 {
&words.0
} else if col == 1 {
&words.1
} else {
panic!("no such column!");
}
})
})
.map(|s| s.to_string())
}
pub fn get_debug_pairs(mode: Mode) -> Vec<(String, String)> {
EDITOR_CONFIG
.get()
.map(|config| match mode {
Mode::Duplicate => config
.init
.single_list_words
.iter()
.skip(1)
.map(|word| (word.to_string(), word.to_string()))
.collect(),
Mode::WordsAndImages => config
.init
.single_list_words
.iter()
.map(|word| (word.to_string(), "".to_string()))
.collect(),
_ => config.init.dual_list_words.clone(),
})
.unwrap_ji()
}
|
{
EDITOR_CONFIG
.get()
.and_then(|config| config.init.single_list_words.get(index))
.map(|s| s.to_string())
}
|
securityDeviceGroup.go
|
// *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***
package iot
import (
"context"
"reflect"
"github.com/pkg/errors"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
// Manages a Iot Security Device Group.
//
// ## Example Usage
//
// ```go
// package main
//
// import (
// "github.com/pulumi/pulumi-azure/sdk/v4/go/azure/core"
// "github.com/pulumi/pulumi-azure/sdk/v4/go/azure/iot"
// "github.com/pulumi/pulumi/sdk/v3/go/pulumi"
// )
//
// func main() {
// pulumi.Run(func(ctx *pulumi.Context) error {
// exampleResourceGroup, err := core.NewResourceGroup(ctx, "exampleResourceGroup", &core.ResourceGroupArgs{
// Location: pulumi.String("West Europe"),
// })
// if err != nil {
// return err
// }
// exampleIoTHub, err := iot.NewIoTHub(ctx, "exampleIoTHub", &iot.IoTHubArgs{
// ResourceGroupName: exampleResourceGroup.Name,
// Location: exampleResourceGroup.Location,
// Sku: &iot.IoTHubSkuArgs{
// Name: pulumi.String("S1"),
// Capacity: pulumi.Int(1),
// },
// })
// if err != nil {
// return err
// }
// exampleSecuritySolution, err := iot.NewSecuritySolution(ctx, "exampleSecuritySolution", &iot.SecuritySolutionArgs{
// ResourceGroupName: exampleResourceGroup.Name,
// Location: exampleResourceGroup.Location,
// DisplayName: pulumi.String("Iot Security Solution"),
// IothubIds: pulumi.StringArray{
// exampleIoTHub.ID(),
// },
// })
// if err != nil {
// return err
// }
// _, err = iot.NewSecurityDeviceGroup(ctx, "exampleSecurityDeviceGroup", &iot.SecurityDeviceGroupArgs{
// IothubId: exampleIoTHub.ID(),
// AllowRule: &iot.SecurityDeviceGroupAllowRuleArgs{
// ConnectionToIpsNotAlloweds: pulumi.StringArray{
// pulumi.String("10.0.0.0/24"),
// },
// },
// RangeRules: iot.SecurityDeviceGroupRangeRuleArray{
// &iot.SecurityDeviceGroupRangeRuleArgs{
// Type: pulumi.String("ActiveConnectionsNotInAllowedRange"),
// Min: pulumi.Int(0),
// Max: pulumi.Int(30),
// Duration: pulumi.String("PT5M"),
// },
// },
// }, pulumi.DependsOn([]pulumi.Resource{
// exampleSecuritySolution,
// }))
// if err != nil {
// return err
// }
// return nil
// })
// }
// ```
//
// ## Import
//
// Iot Security Device Group can be imported using the `resource id`, e.g.
//
// ```sh
// $ pulumi import azure:iot/securityDeviceGroup:SecurityDeviceGroup example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.Devices/iotHubs/hub1/providers/Microsoft.Security/deviceSecurityGroups/group1
// ```
type SecurityDeviceGroup struct {
pulumi.CustomResourceState
// an `allowRule` blocks as defined below.
AllowRule SecurityDeviceGroupAllowRulePtrOutput `pulumi:"allowRule"`
// The ID of the IoT Hub which to link the Security Device Group to. Changing this forces a new resource to be created.
IothubId pulumi.StringOutput `pulumi:"iothubId"`
// Specifies the name of the Device Security Group. Changing this forces a new resource to be created.
Name pulumi.StringOutput `pulumi:"name"`
// One or more `rangeRule` blocks as defined below.
RangeRules SecurityDeviceGroupRangeRuleArrayOutput `pulumi:"rangeRules"`
}
// NewSecurityDeviceGroup registers a new resource with the given unique name, arguments, and options.
func NewSecurityDeviceGroup(ctx *pulumi.Context,
name string, args *SecurityDeviceGroupArgs, opts ...pulumi.ResourceOption) (*SecurityDeviceGroup, error) {
if args == nil {
return nil, errors.New("missing one or more required arguments")
}
if args.IothubId == nil {
return nil, errors.New("invalid value for required argument 'IothubId'")
}
var resource SecurityDeviceGroup
err := ctx.RegisterResource("azure:iot/securityDeviceGroup:SecurityDeviceGroup", name, args, &resource, opts...)
if err != nil {
return nil, err
}
return &resource, nil
}
// GetSecurityDeviceGroup gets an existing SecurityDeviceGroup resource's state with the given name, ID, and optional
// state properties that are used to uniquely qualify the lookup (nil if not required).
func GetSecurityDeviceGroup(ctx *pulumi.Context,
name string, id pulumi.IDInput, state *SecurityDeviceGroupState, opts ...pulumi.ResourceOption) (*SecurityDeviceGroup, error) {
var resource SecurityDeviceGroup
err := ctx.ReadResource("azure:iot/securityDeviceGroup:SecurityDeviceGroup", name, id, state, &resource, opts...)
if err != nil {
return nil, err
}
return &resource, nil
}
// Input properties used for looking up and filtering SecurityDeviceGroup resources.
type securityDeviceGroupState struct {
// an `allowRule` blocks as defined below.
AllowRule *SecurityDeviceGroupAllowRule `pulumi:"allowRule"`
// The ID of the IoT Hub which to link the Security Device Group to. Changing this forces a new resource to be created.
IothubId *string `pulumi:"iothubId"`
// Specifies the name of the Device Security Group. Changing this forces a new resource to be created.
Name *string `pulumi:"name"`
// One or more `rangeRule` blocks as defined below.
RangeRules []SecurityDeviceGroupRangeRule `pulumi:"rangeRules"`
}
type SecurityDeviceGroupState struct {
// an `allowRule` blocks as defined below.
AllowRule SecurityDeviceGroupAllowRulePtrInput
// The ID of the IoT Hub which to link the Security Device Group to. Changing this forces a new resource to be created.
IothubId pulumi.StringPtrInput
// Specifies the name of the Device Security Group. Changing this forces a new resource to be created.
Name pulumi.StringPtrInput
// One or more `rangeRule` blocks as defined below.
RangeRules SecurityDeviceGroupRangeRuleArrayInput
}
func (SecurityDeviceGroupState) ElementType() reflect.Type {
return reflect.TypeOf((*securityDeviceGroupState)(nil)).Elem()
}
type securityDeviceGroupArgs struct {
// an `allowRule` blocks as defined below.
AllowRule *SecurityDeviceGroupAllowRule `pulumi:"allowRule"`
// The ID of the IoT Hub which to link the Security Device Group to. Changing this forces a new resource to be created.
IothubId string `pulumi:"iothubId"`
// Specifies the name of the Device Security Group. Changing this forces a new resource to be created.
Name *string `pulumi:"name"`
// One or more `rangeRule` blocks as defined below.
RangeRules []SecurityDeviceGroupRangeRule `pulumi:"rangeRules"`
}
// The set of arguments for constructing a SecurityDeviceGroup resource.
type SecurityDeviceGroupArgs struct {
// an `allowRule` blocks as defined below.
AllowRule SecurityDeviceGroupAllowRulePtrInput
// The ID of the IoT Hub which to link the Security Device Group to. Changing this forces a new resource to be created.
IothubId pulumi.StringInput
// Specifies the name of the Device Security Group. Changing this forces a new resource to be created.
Name pulumi.StringPtrInput
// One or more `rangeRule` blocks as defined below.
RangeRules SecurityDeviceGroupRangeRuleArrayInput
}
func (SecurityDeviceGroupArgs) ElementType() reflect.Type {
return reflect.TypeOf((*securityDeviceGroupArgs)(nil)).Elem()
}
type SecurityDeviceGroupInput interface {
pulumi.Input
ToSecurityDeviceGroupOutput() SecurityDeviceGroupOutput
ToSecurityDeviceGroupOutputWithContext(ctx context.Context) SecurityDeviceGroupOutput
}
func (*SecurityDeviceGroup) ElementType() reflect.Type {
return reflect.TypeOf((*SecurityDeviceGroup)(nil))
}
func (i *SecurityDeviceGroup) ToSecurityDeviceGroupOutput() SecurityDeviceGroupOutput {
return i.ToSecurityDeviceGroupOutputWithContext(context.Background())
}
func (i *SecurityDeviceGroup) ToSecurityDeviceGroupOutputWithContext(ctx context.Context) SecurityDeviceGroupOutput {
return pulumi.ToOutputWithContext(ctx, i).(SecurityDeviceGroupOutput)
}
func (i *SecurityDeviceGroup) ToSecurityDeviceGroupPtrOutput() SecurityDeviceGroupPtrOutput {
return i.ToSecurityDeviceGroupPtrOutputWithContext(context.Background())
}
func (i *SecurityDeviceGroup) ToSecurityDeviceGroupPtrOutputWithContext(ctx context.Context) SecurityDeviceGroupPtrOutput {
return pulumi.ToOutputWithContext(ctx, i).(SecurityDeviceGroupPtrOutput)
}
type SecurityDeviceGroupPtrInput interface {
pulumi.Input
ToSecurityDeviceGroupPtrOutput() SecurityDeviceGroupPtrOutput
ToSecurityDeviceGroupPtrOutputWithContext(ctx context.Context) SecurityDeviceGroupPtrOutput
}
type securityDeviceGroupPtrType SecurityDeviceGroupArgs
func (*securityDeviceGroupPtrType) ElementType() reflect.Type {
return reflect.TypeOf((**SecurityDeviceGroup)(nil))
}
func (i *securityDeviceGroupPtrType) ToSecurityDeviceGroupPtrOutput() SecurityDeviceGroupPtrOutput {
return i.ToSecurityDeviceGroupPtrOutputWithContext(context.Background())
}
func (i *securityDeviceGroupPtrType) ToSecurityDeviceGroupPtrOutputWithContext(ctx context.Context) SecurityDeviceGroupPtrOutput {
return pulumi.ToOutputWithContext(ctx, i).(SecurityDeviceGroupPtrOutput)
}
// SecurityDeviceGroupArrayInput is an input type that accepts SecurityDeviceGroupArray and SecurityDeviceGroupArrayOutput values.
// You can construct a concrete instance of `SecurityDeviceGroupArrayInput` via:
//
// SecurityDeviceGroupArray{ SecurityDeviceGroupArgs{...} }
type SecurityDeviceGroupArrayInput interface {
pulumi.Input
ToSecurityDeviceGroupArrayOutput() SecurityDeviceGroupArrayOutput
ToSecurityDeviceGroupArrayOutputWithContext(context.Context) SecurityDeviceGroupArrayOutput
}
type SecurityDeviceGroupArray []SecurityDeviceGroupInput
func (SecurityDeviceGroupArray) ElementType() reflect.Type {
return reflect.TypeOf((*[]*SecurityDeviceGroup)(nil)).Elem()
}
func (i SecurityDeviceGroupArray) ToSecurityDeviceGroupArrayOutput() SecurityDeviceGroupArrayOutput {
return i.ToSecurityDeviceGroupArrayOutputWithContext(context.Background())
}
func (i SecurityDeviceGroupArray) ToSecurityDeviceGroupArrayOutputWithContext(ctx context.Context) SecurityDeviceGroupArrayOutput {
return pulumi.ToOutputWithContext(ctx, i).(SecurityDeviceGroupArrayOutput)
|
// SecurityDeviceGroupMapInput is an input type that accepts SecurityDeviceGroupMap and SecurityDeviceGroupMapOutput values.
// You can construct a concrete instance of `SecurityDeviceGroupMapInput` via:
//
// SecurityDeviceGroupMap{ "key": SecurityDeviceGroupArgs{...} }
type SecurityDeviceGroupMapInput interface {
pulumi.Input
ToSecurityDeviceGroupMapOutput() SecurityDeviceGroupMapOutput
ToSecurityDeviceGroupMapOutputWithContext(context.Context) SecurityDeviceGroupMapOutput
}
type SecurityDeviceGroupMap map[string]SecurityDeviceGroupInput
func (SecurityDeviceGroupMap) ElementType() reflect.Type {
return reflect.TypeOf((*map[string]*SecurityDeviceGroup)(nil)).Elem()
}
func (i SecurityDeviceGroupMap) ToSecurityDeviceGroupMapOutput() SecurityDeviceGroupMapOutput {
return i.ToSecurityDeviceGroupMapOutputWithContext(context.Background())
}
func (i SecurityDeviceGroupMap) ToSecurityDeviceGroupMapOutputWithContext(ctx context.Context) SecurityDeviceGroupMapOutput {
return pulumi.ToOutputWithContext(ctx, i).(SecurityDeviceGroupMapOutput)
}
type SecurityDeviceGroupOutput struct{ *pulumi.OutputState }
func (SecurityDeviceGroupOutput) ElementType() reflect.Type {
return reflect.TypeOf((*SecurityDeviceGroup)(nil))
}
func (o SecurityDeviceGroupOutput) ToSecurityDeviceGroupOutput() SecurityDeviceGroupOutput {
return o
}
func (o SecurityDeviceGroupOutput) ToSecurityDeviceGroupOutputWithContext(ctx context.Context) SecurityDeviceGroupOutput {
return o
}
func (o SecurityDeviceGroupOutput) ToSecurityDeviceGroupPtrOutput() SecurityDeviceGroupPtrOutput {
return o.ToSecurityDeviceGroupPtrOutputWithContext(context.Background())
}
func (o SecurityDeviceGroupOutput) ToSecurityDeviceGroupPtrOutputWithContext(ctx context.Context) SecurityDeviceGroupPtrOutput {
return o.ApplyTWithContext(ctx, func(_ context.Context, v SecurityDeviceGroup) *SecurityDeviceGroup {
return &v
}).(SecurityDeviceGroupPtrOutput)
}
type SecurityDeviceGroupPtrOutput struct{ *pulumi.OutputState }
func (SecurityDeviceGroupPtrOutput) ElementType() reflect.Type {
return reflect.TypeOf((**SecurityDeviceGroup)(nil))
}
func (o SecurityDeviceGroupPtrOutput) ToSecurityDeviceGroupPtrOutput() SecurityDeviceGroupPtrOutput {
return o
}
func (o SecurityDeviceGroupPtrOutput) ToSecurityDeviceGroupPtrOutputWithContext(ctx context.Context) SecurityDeviceGroupPtrOutput {
return o
}
func (o SecurityDeviceGroupPtrOutput) Elem() SecurityDeviceGroupOutput {
return o.ApplyT(func(v *SecurityDeviceGroup) SecurityDeviceGroup {
if v != nil {
return *v
}
var ret SecurityDeviceGroup
return ret
}).(SecurityDeviceGroupOutput)
}
type SecurityDeviceGroupArrayOutput struct{ *pulumi.OutputState }
func (SecurityDeviceGroupArrayOutput) ElementType() reflect.Type {
return reflect.TypeOf((*[]SecurityDeviceGroup)(nil))
}
func (o SecurityDeviceGroupArrayOutput) ToSecurityDeviceGroupArrayOutput() SecurityDeviceGroupArrayOutput {
return o
}
func (o SecurityDeviceGroupArrayOutput) ToSecurityDeviceGroupArrayOutputWithContext(ctx context.Context) SecurityDeviceGroupArrayOutput {
return o
}
func (o SecurityDeviceGroupArrayOutput) Index(i pulumi.IntInput) SecurityDeviceGroupOutput {
return pulumi.All(o, i).ApplyT(func(vs []interface{}) SecurityDeviceGroup {
return vs[0].([]SecurityDeviceGroup)[vs[1].(int)]
}).(SecurityDeviceGroupOutput)
}
type SecurityDeviceGroupMapOutput struct{ *pulumi.OutputState }
func (SecurityDeviceGroupMapOutput) ElementType() reflect.Type {
return reflect.TypeOf((*map[string]SecurityDeviceGroup)(nil))
}
func (o SecurityDeviceGroupMapOutput) ToSecurityDeviceGroupMapOutput() SecurityDeviceGroupMapOutput {
return o
}
func (o SecurityDeviceGroupMapOutput) ToSecurityDeviceGroupMapOutputWithContext(ctx context.Context) SecurityDeviceGroupMapOutput {
return o
}
func (o SecurityDeviceGroupMapOutput) MapIndex(k pulumi.StringInput) SecurityDeviceGroupOutput {
return pulumi.All(o, k).ApplyT(func(vs []interface{}) SecurityDeviceGroup {
return vs[0].(map[string]SecurityDeviceGroup)[vs[1].(string)]
}).(SecurityDeviceGroupOutput)
}
func init() {
pulumi.RegisterInputType(reflect.TypeOf((*SecurityDeviceGroupInput)(nil)).Elem(), &SecurityDeviceGroup{})
pulumi.RegisterInputType(reflect.TypeOf((*SecurityDeviceGroupPtrInput)(nil)).Elem(), &SecurityDeviceGroup{})
pulumi.RegisterInputType(reflect.TypeOf((*SecurityDeviceGroupArrayInput)(nil)).Elem(), SecurityDeviceGroupArray{})
pulumi.RegisterInputType(reflect.TypeOf((*SecurityDeviceGroupMapInput)(nil)).Elem(), SecurityDeviceGroupMap{})
pulumi.RegisterOutputType(SecurityDeviceGroupOutput{})
pulumi.RegisterOutputType(SecurityDeviceGroupPtrOutput{})
pulumi.RegisterOutputType(SecurityDeviceGroupArrayOutput{})
pulumi.RegisterOutputType(SecurityDeviceGroupMapOutput{})
}
|
}
|
TestCar.py
|
from src.Car import Car
from src.CarImpl import CarImpl
from unittest.mock import *
from unittest import TestCase, main
class test_Car(TestCase):
def
|
(self):
car = Car()
car.needsFuel = Mock(name='needsFuel')
car.needsFuel.return_value = True
carImpl = CarImpl(car)
self.assertEqual(carImpl.car_needsFuel(), "You need to refuel soon!")
def test_needsfuel_false(self):
car = Car()
car.needsFuel = Mock(name='needsFuel')
car.needsFuel.return_value = False
carImpl = CarImpl(car)
self.assertEqual(carImpl.car_needsFuel(), "No need to refuel now")
def test_getenginetemperature_toocold(self):
car = Car()
car.getEngineTemperature = Mock(name='getEngineTemperature')
car.getEngineTemperature.return_value = 60
carImpl = CarImpl(car)
self.assertEqual(carImpl.car_getEngineTemperature(), "Engine is running too cold!")
def test_getenginetemperature_toohot(self):
car = Car()
car.getEngineTemperature = Mock(name='getEngineTemperature')
car.getEngineTemperature.return_value = 120
carImpl = CarImpl(car)
self.assertEqual(carImpl.car_getEngineTemperature(), "Engine is running too hot!")
def test_getenginetemperature_optimal(self):
car = Car()
car.getEngineTemperature = Mock(name='getEngineTemperature')
car.getEngineTemperature.return_value = 85
carImpl = CarImpl(car)
self.assertEqual(carImpl.car_getEngineTemperature(), "Engine is running at an optimal temperature")
def test_driveto(self):
car = Car()
car.driveTo = Mock(name='driveTo')
destination = "Szczecin"
car.driveTo.return_value = destination
carImpl = CarImpl(car)
self.assertEqual(carImpl.car_driveTo(destination), f"The car is headed to {destination}")
if __name__ == '__main__':
main()
|
test_needsfuel_true
|
skyscrapers.py
|
"""
https://github.com/Adeon18/skyscrapers
"""
def read_input(path: str) -> list:
"""
Read game board file from path.
Return list of str.
"""
with open(path, "r") as file:
output_lst = file.read().split("\n")
output_lst = output_lst[:-1]
return output_lst
def
|
(input_line: str, pivot: int) -> bool:
"""
Check row-wise visibility from left to right.
Return True if number of building from the left-most
hint is visible looking to the right,
False otherwise.
input_line - representing board row.
pivot - number on the left-most hint of the input_line.
>>> left_to_right_check("412453*", 4)
True
>>> left_to_right_check("452453*", 5)
False
>>> left_to_right_check("512345*", 5)
True
>>> left_to_right_check("4124531", 4)
True
"""
row = input_line
max_num = 0
count = 0
for _, num in enumerate(row[1:-1]):
# If the row is *, we move on to the next
if num == "*":
continue
# Check if the current building is the one we need
if int(num) > max_num:
max_num = int(num)
count += 1
if count == pivot:
return True
return False
def check_not_finished_board(board: list) -> bool:
"""
Check if skyscraper board is not finished, i.e.,
'?' present on the game board.
Return True if finished, False otherwise.
>>> check_not_finished_board(['***21**', '4?????*',\
'4?????*', '*?????5', '*?????*', '*?????*', '*2*1***'])
False
>>> check_not_finished_board(['***21**', '412453*',\
'423145*', '*543215', '*35214*', '*41532*', '*2*1***'])
True
>>> check_not_finished_board(['***21**', '412453*',\
'423145*', '*5?3215', '*35214*', '*41532*', '*2*1***'])
False
"""
for row in board:
if "?" in row:
return False
return True
def check_uniqueness_in_rows(board: list) -> bool:
"""
Check buildings of unique height in each row.
Return True if buildings in a row have unique length,
False otherwise.
>>> check_uniqueness_in_rows(['***21**', '412453*', '423145*',\
'*543215', '*35214*', '*41532*', '*2*1***'])
True
>>> check_uniqueness_in_rows(['***21**', '452453*', '423145*',\
'*543215', '*35214*', '*41532*', '*2*1***'])
False
>>> check_uniqueness_in_rows(['***21**', '412453*', '423145*',\
'*553215', '*35214*', '*41532*', '*2*1***'])
False
"""
# We chop each row
for row in board[1:-1]:
elements_int = []
for elem in row[1:-1]:
# If element can't be converted to int, it is skipped
try:
if int(elem) in elements_int:
return False
else:
elements_int.append(int(elem))
except:
continue
return True
def check_horizontal_visibility(board: list) -> bool:
"""
Check row-wise visibility (left-right and vice versa)
Return True if all horizontal hints are satisfiable,
i.e., for line 412453* , hint is 4, and 1245 are the four buildings
that could be observed from the hint looking to the right.
>>> check_horizontal_visibility(['***21**', '412453*', '423145*',\
'*543215', '*35214*', '*41532*', '*2*1***'])
True
>>> check_horizontal_visibility(['***21**', '452453*', '423145*',\
'*543215', '*35214*', '*41532*', '*2*1***'])
False
>>> check_horizontal_visibility(['***21**', '452413*', '423145*',\
'*543215', '*35214*', '*41532*', '*2*1***'])
False
"""
# Our right hint(default=*)
right_req = "*"
for row in board[1:-1]:
# We keep track of the max element and seen buildings
right_flag = 0
max_elem_right = 0
# We skip if there's no hint
if row[0] == "*":
continue
else:
right_req = int(row[0])
for elem in row[1:-1]:
# Check if the following element is bigger
if int(elem) > max_elem_right:
max_elem_right = int(elem)
right_flag += 1
# If the hints aren't met, we return False
if right_flag != right_req:
return False
# Same code, another direction, rewritten for better readability
left_req = "*"
for row in board[1:-1]:
left_flag = 0
max_elem_left = 0
if row[-1] == "*":
continue
else:
left_req = int(row[-1])
for elem in row[1:-1][::-1]:
if int(elem) > max_elem_left:
max_elem_left = int(elem)
left_flag += 1
# print('left ', right_flag, right_req)
if left_flag != left_req:
return False
return True
def check_columns(board: list) -> bool:
"""
Check column-wise compliance of the board for
uniqueness (buildings of unique height)
and visibility (top-bottom and vice versa).
Same as for horizontal cases, but aggregated in one
function for vertical case, i.e. columns.
>>> check_columns(['***21**', '412453*', '423145*',\
'*543215', '*35214*', '*41532*', '*2*1***'])
True
>>> check_columns(['***21**', '412453*', '423145*',\
'*543215', '*35214*', '*41232*', '*2*1***'])
False
>>> check_columns(['***21**', '412553*', '423145*',\
'*543215', '*35214*', '*41532*', '*2*1***'])
False
"""
new_lst = []
# Flip and check horisontally
for i, row in enumerate(board):
new_elem = ""
for j, _ in enumerate(row):
new_elem += board[j][i]
new_lst.append(new_elem)
if check_uniqueness_in_rows(new_lst) and check_not_finished_board(new_lst):
return check_horizontal_visibility(new_lst)
return False
def check_skyscrapers(input_path: str) -> bool:
"""
Main function to check the status of skyscraper game board.
Return True if the board status is compliant with the rules,
False otherwise.
"""
board = read_input(input_path)
# If everything is met return True
if (
check_horizontal_visibility(board)
and check_columns(board)
and check_uniqueness_in_rows(board)
and check_not_finished_board(board)
):
return True
return False
if __name__ == "__main__":
import doctest
print(doctest.testmod())
|
left_to_right_check
|
dolt_transaction_commit_test.go
|
// Copyright 2021 Dolthub, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package enginetest
import (
"context"
"testing"
"github.com/dolthub/go-mysql-server/enginetest"
"github.com/dolthub/go-mysql-server/sql"
"github.com/stretchr/testify/require"
"github.com/dolthub/dolt/go/libraries/doltcore/doltdb"
"github.com/dolthub/dolt/go/store/types"
)
// TODO: we need tests for manual DOLT_COMMIT as well, but that's difficult with the way that functions are resolved
// in the engine.
func TestDoltTransactionCommitOneClient(t *testing.T) {
if types.IsFormat_DOLT_1(types.Format_Default) {
t.Skip()
}
// In this test, we're setting only one client to match transaction commits to dolt commits.
// Autocommit is disabled for the enabled client, as it's the recommended way to use this feature.
harness := newDoltHarness(t)
enginetest.TestTransactionScript(t, harness, enginetest.TransactionTest{
Name: "dolt commit on transaction commit one client",
SetUpScript: []string{
"CREATE TABLE x (y BIGINT PRIMARY KEY, z BIGINT);",
"INSERT INTO x VALUES (1,1);",
},
Assertions: []enginetest.ScriptTestAssertion{
{
Query: "/* client a */ SET @@autocommit=0;",
Expected: []sql.Row{{}},
},
// start transaction implicitly commits the current transaction, so we have to do so before we turn on dolt commits
{
Query: "/* client a */ START TRANSACTION;",
Expected: []sql.Row{},
},
{
Query: "/* client b */ START TRANSACTION;",
Expected: []sql.Row{},
},
{
Query: "/* client a */ SET @@dolt_transaction_commit=1;",
Expected: []sql.Row{{}},
},
{
Query: "/* client a */ SET @initial_head=@@mydb_head;",
Expected: []sql.Row{{}},
},
{
Query: "/* client b */ SET @initial_head=@@mydb_head;",
Expected: []sql.Row{{}},
},
{
Query: "/* client a */ SELECT @@mydb_head like @initial_head;",
Expected: []sql.Row{{true}},
},
{
Query: "/* client b */ SELECT @@mydb_head like @initial_head;",
Expected: []sql.Row{{true}},
},
{
Query: "/* client a */ INSERT INTO x VALUES (2,2);",
Expected: []sql.Row{{sql.NewOkResult(1)}},
},
{
Query: "/* client b */ INSERT INTO x VALUES (3,3);",
Expected: []sql.Row{{sql.NewOkResult(1)}},
},
{
Query: "/* client a */ SELECT * FROM x ORDER BY y;",
Expected: []sql.Row{{1, 1}, {2, 2}},
},
{
Query: "/* client b */ SELECT * FROM x ORDER BY y;",
Expected: []sql.Row{{1, 1}, {3, 3}},
},
{
Query: "/* client a */ SELECT @@mydb_head like @initial_head;",
Expected: []sql.Row{{true}},
},
{
Query: "/* client b */ SELECT @@mydb_head like @initial_head;",
Expected: []sql.Row{{true}},
},
{
Query: "/* client b */ COMMIT;",
Expected: []sql.Row{},
},
{
Query: "/* client a */ SELECT @@mydb_head like @initial_head;",
Expected: []sql.Row{{true}},
},
{
Query: "/* client b */ SELECT @@mydb_head like @initial_head;",
Expected: []sql.Row{{true}},
},
{
Query: "/* client a */ COMMIT;",
Expected: []sql.Row{},
},
{
Query: "/* client a */ SELECT @@mydb_head like @initial_head;",
Expected: []sql.Row{{false}},
},
{
Query: "/* client b */ SELECT @@mydb_head like @initial_head;",
Expected: []sql.Row{{false}},
},
{
Query: "/* client a */ SELECT * FROM x ORDER BY y;",
Expected: []sql.Row{{1, 1}, {2, 2}, {3, 3}},
},
{
Query: "/* client b */ SELECT * FROM x ORDER BY y;",
Expected: []sql.Row{{1, 1}, {2, 2}, {3, 3}},
},
{
Query: "/* client a */ SELECT @@mydb_head like @initial_head;",
Expected: []sql.Row{{false}},
},
{
Query: "/* client b */ SELECT @@mydb_head like @initial_head;",
Expected: []sql.Row{{false}},
},
{
Query: "/* client c */ SELECT * FROM x ORDER BY y;",
Expected: []sql.Row{{1, 1}, {2, 2}, {3, 3}},
},
},
})
db := harness.databases[0].GetDoltDB()
cs, err := doltdb.NewCommitSpec("HEAD")
require.NoError(t, err)
headRefs, err := db.GetHeadRefs(context.Background())
require.NoError(t, err)
commit, err := db.Resolve(context.Background(), cs, headRefs[0])
require.NoError(t, err)
cm, err := commit.GetCommitMeta(context.Background())
require.NoError(t, err)
require.Contains(t, cm.Description, "Transaction commit")
as, err := doltdb.NewAncestorSpec("~1")
require.NoError(t, err)
initialCommit, err := commit.GetAncestor(context.Background(), as)
require.NoError(t, err)
icm, err := initialCommit.GetCommitMeta(context.Background())
require.NoError(t, err)
require.Equal(t, "Initialize data repository", icm.Description)
}
func TestDoltTransactionCommitTwoClients(t *testing.T) {
if types.IsFormat_DOLT_1(types.Format_Default) {
t.Skip()
}
// In this test, we're setting both clients to match transaction commits to dolt commits.
// Autocommit is disabled, as it's the recommended way to use this feature.
harness := newDoltHarness(t)
enginetest.TestTransactionScript(t, harness, enginetest.TransactionTest{
Name: "dolt commit on transaction commit two clients",
SetUpScript: []string{
"CREATE TABLE x (y BIGINT PRIMARY KEY, z BIGINT);",
"INSERT INTO x VALUES (1,1);",
},
Assertions: []enginetest.ScriptTestAssertion{
{
Query: "/* client a */ SET @@autocommit=0;",
Expected: []sql.Row{{}},
},
{
Query: "/* client b */ SET @@autocommit=0;",
Expected: []sql.Row{{}},
},
// start transaction implicitly commits the current transaction, so we have to do so before we turn on dolt commits
{
Query: "/* client a */ START TRANSACTION;",
Expected: []sql.Row{},
},
{
Query: "/* client b */ START TRANSACTION;",
Expected: []sql.Row{},
},
{
Query: "/* client a */ SET @@dolt_transaction_commit=1;",
Expected: []sql.Row{{}},
},
{
Query: "/* client b */ SET @@dolt_transaction_commit=1;",
Expected: []sql.Row{{}},
},
{
Query: "/* client a */ SET @initial_head=@@mydb_head;",
Expected: []sql.Row{{}},
},
{
Query: "/* client b */ SET @initial_head=@@mydb_head;",
Expected: []sql.Row{{}},
},
{
Query: "/* client a */ INSERT INTO x VALUES (2,2);",
Expected: []sql.Row{{sql.NewOkResult(1)}},
},
{
Query: "/* client b */ INSERT INTO x VALUES (3,3);",
Expected: []sql.Row{{sql.NewOkResult(1)}},
},
{
Query: "/* client a */ SELECT * FROM x ORDER BY y;",
Expected: []sql.Row{{1, 1}, {2, 2}},
},
{
Query: "/* client b */ SELECT * FROM x ORDER BY y;",
Expected: []sql.Row{{1, 1}, {3, 3}},
},
{
Query: "/* client a */ SELECT @@mydb_head like @initial_head;",
Expected: []sql.Row{{true}},
},
{
Query: "/* client b */ SELECT @@mydb_head like @initial_head;",
Expected: []sql.Row{{true}},
},
{
Query: "/* client b */ COMMIT;",
Expected: []sql.Row{},
},
{
Query: "/* client a */ SELECT @@mydb_head like @initial_head;",
Expected: []sql.Row{{true}},
},
{
Query: "/* client a */ COMMIT;",
Expected: []sql.Row{},
},
{
Query: "/* client a */ SELECT @@mydb_head like @initial_head;",
Expected: []sql.Row{{false}},
},
{
Query: "/* client b */ SELECT @@mydb_head like @initial_head;",
Expected: []sql.Row{{false}},
},
{
Query: "/* client a */ SELECT * FROM x ORDER BY y;",
Expected: []sql.Row{{1, 1}, {2, 2}, {3, 3}},
},
{
Query: "/* client b */ SELECT * FROM x ORDER BY y;",
Expected: []sql.Row{{1, 1}, {2, 2}, {3, 3}},
},
{
Query: "/* client c */ SELECT * FROM x ORDER BY y;",
Expected: []sql.Row{{1, 1}, {2, 2}, {3, 3}},
},
},
})
db := harness.databases[0].GetDoltDB()
cs, err := doltdb.NewCommitSpec("HEAD")
require.NoError(t, err)
headRefs, err := db.GetHeadRefs(context.Background())
require.NoError(t, err)
commit2, err := db.Resolve(context.Background(), cs, headRefs[0])
require.NoError(t, err)
cm2, err := commit2.GetCommitMeta(context.Background())
require.NoError(t, err)
require.Contains(t, cm2.Description, "Transaction commit")
as, err := doltdb.NewAncestorSpec("~1")
require.NoError(t, err)
commit1, err := commit2.GetAncestor(context.Background(), as)
require.NoError(t, err)
cm1, err := commit1.GetCommitMeta(context.Background())
require.NoError(t, err)
require.Contains(t, cm1.Description, "Transaction commit")
commit0, err := commit1.GetAncestor(context.Background(), as)
require.NoError(t, err)
cm0, err := commit0.GetCommitMeta(context.Background())
require.NoError(t, err)
require.Equal(t, "Initialize data repository", cm0.Description)
}
func TestDoltTransactionCommitAutocommit(t *testing.T) {
if types.IsFormat_DOLT_1(types.Format_Default) {
t.Skip()
}
// In this test, each insertion from both clients cause a commit as autocommit is enabled.
// Not the recommended way to use the feature, but it's permitted.
harness := newDoltHarness(t)
enginetest.TestTransactionScript(t, harness, enginetest.TransactionTest{
Name: "dolt commit with autocommit",
SetUpScript: []string{
"CREATE TABLE x (y BIGINT PRIMARY KEY, z BIGINT);",
"INSERT INTO x VALUES (1,1);",
},
Assertions: []enginetest.ScriptTestAssertion{
// these SET statements currently commit a transaction (since autocommit is on)
{
Query: "/* client a */ SET @@dolt_transaction_commit=1;",
Expected: []sql.Row{{}},
},
{
Query: "/* client b */ SET @@dolt_transaction_commit=1;",
Expected: []sql.Row{{}},
},
{
Query: "/* client a */ INSERT INTO x VALUES (2,2);",
Expected: []sql.Row{{sql.NewOkResult(1)}},
},
{
Query: "/* client b */ INSERT INTO x VALUES (3,3);",
Expected: []sql.Row{{sql.NewOkResult(1)}},
},
{
Query: "/* client a */ SELECT * FROM x ORDER BY y;",
Expected: []sql.Row{{1, 1}, {2, 2}, {3, 3}},
},
{
Query: "/* client b */ SELECT * FROM x ORDER BY y;",
Expected: []sql.Row{{1, 1}, {2, 2}, {3, 3}},
},
{
Query: "/* client c */ SELECT * FROM x ORDER BY y;",
Expected: []sql.Row{{1, 1}, {2, 2}, {3, 3}},
},
},
})
db := harness.databases[0].GetDoltDB()
cs, err := doltdb.NewCommitSpec("HEAD")
require.NoError(t, err)
headRefs, err := db.GetHeadRefs(context.Background())
require.NoError(t, err)
commit3, err := db.Resolve(context.Background(), cs, headRefs[0])
require.NoError(t, err)
cm3, err := commit3.GetCommitMeta(context.Background())
require.NoError(t, err)
require.Contains(t, cm3.Description, "Transaction commit")
as, err := doltdb.NewAncestorSpec("~1")
require.NoError(t, err)
commit2, err := commit3.GetAncestor(context.Background(), as)
require.NoError(t, err)
cm2, err := commit2.GetCommitMeta(context.Background())
require.NoError(t, err)
require.Contains(t, cm2.Description, "Transaction commit")
commit1, err := commit2.GetAncestor(context.Background(), as)
require.NoError(t, err)
cm1, err := commit1.GetCommitMeta(context.Background())
require.NoError(t, err)
require.Equal(t, "Transaction commit", cm1.Description)
commit0, err := commit1.GetAncestor(context.Background(), as)
require.NoError(t, err)
cm0, err := commit0.GetCommitMeta(context.Background())
require.NoError(t, err)
require.Equal(t, "Initialize data repository", cm0.Description)
}
func TestDoltTransactionCommitLateFkResolution(t *testing.T)
|
{
if types.IsFormat_DOLT_1(types.Format_Default) {
t.Skip()
}
harness := newDoltHarness(t)
enginetest.TestTransactionScript(t, harness, enginetest.TransactionTest{
Name: "delayed foreign key resolution with transaction commits",
SetUpScript: []string{
"SET foreign_key_checks=0;",
"CREATE TABLE child (pk BIGINT PRIMARY KEY, v1 BIGINT, CONSTRAINT fk_late FOREIGN KEY (v1) REFERENCES parent (pk));",
"SET foreign_key_checks=1;",
"CREATE TABLE parent (pk BIGINT PRIMARY KEY);",
"INSERT INTO parent VALUES (1), (2);",
},
Assertions: []enginetest.ScriptTestAssertion{
{
Query: "/* client a */ SET @@autocommit=0;",
Expected: []sql.Row{{}},
},
{
Query: "/* client b */ SET @@autocommit=0;",
Expected: []sql.Row{{}},
},
{
Query: "/* client a */ START TRANSACTION;",
Expected: []sql.Row{},
},
{
Query: "/* client b */ START TRANSACTION;",
Expected: []sql.Row{},
},
{
Query: "/* client a */ INSERT INTO child VALUES (1, 1);",
Expected: []sql.Row{{sql.NewOkResult(1)}},
},
{
Query: "/* client b */ INSERT INTO child VALUES (2, 2);",
Expected: []sql.Row{{sql.NewOkResult(1)}},
},
{
Query: "/* client a */ COMMIT;",
Expected: []sql.Row{},
},
{
Query: "/* client b */ COMMIT;",
Expected: []sql.Row{},
},
{
Query: "/* client a */ SELECT * FROM child ORDER BY pk;",
Expected: []sql.Row{{1, 1}, {2, 2}},
},
{
Query: "/* client b */ SELECT * FROM child ORDER BY pk;",
Expected: []sql.Row{{1, 1}, {2, 2}},
},
{ // This uses the index, which is automatically created by the late fk resolution, so it's also tested here
Query: "/* client a */ SELECT * FROM child WHERE v1 > 0 ORDER BY pk;",
Expected: []sql.Row{{1, 1}, {2, 2}},
},
{ // This uses the index, which is automatically created by the late fk resolution, so it's also tested here
Query: "/* client b */ SELECT * FROM child WHERE v1 > 0 ORDER BY pk;",
Expected: []sql.Row{{1, 1}, {2, 2}},
},
{
Query: "/* client a */ INSERT INTO child VALUES (3, 3);",
ExpectedErr: sql.ErrForeignKeyChildViolation,
},
{
Query: "/* client b */ INSERT INTO child VALUES (3, 3);",
ExpectedErr: sql.ErrForeignKeyChildViolation,
},
},
})
}
|
|
patricia_sparse_test.go
|
// Copyright (c) 2014 The go-patricia AUTHORS
//
// Use of this source code is governed by The MIT License
// that can be found in the LICENSE file.
package patricia
import (
"bytes"
"errors"
"fmt"
"strings"
"testing"
)
const (
success = true
failure = false
)
type testData struct {
key string
value interface{}
retVal bool
}
// Tests -----------------------------------------------------------------------
func TestTrie_InsertDifferentPrefixes(t *testing.T) {
trie := NewTrie()
data := []testData{
{"Pepaneeeeeeeeeeeeee", "Pepan Zdepan", success},
{"Honzooooooooooooooo", "Honza Novak", success},
{"Jenikuuuuuuuuuuuuuu", "Jenik Poustevnicek", success},
}
for _, v := range data {
t.Logf("INSERT prefix=%v, item=%v, success=%v", v.key, v.value, v.retVal)
if ok := trie.Insert(Prefix(v.key), v.value); ok != v.retVal {
t.Errorf("Unexpected return value, expected=%v, got=%v", v.retVal, ok)
}
}
}
func TestTrie_InsertDuplicatePrefixes(t *testing.T) {
trie := NewTrie()
data := []testData{
{"Pepan", "Pepan Zdepan", success},
{"Pepan", "Pepan Zdepan", failure},
}
for _, v := range data {
t.Logf("INSERT prefix=%v, item=%v, success=%v", v.key, v.value, v.retVal)
if ok := trie.Insert(Prefix(v.key), v.value); ok != v.retVal {
t.Errorf("Unexpected return value, expected=%v, got=%v", v.retVal, ok)
}
}
}
func TestTrie_InsertVariousPrefixes(t *testing.T) {
trie := NewTrie()
data := []testData{
{"Pepan", "Pepan Zdepan", success},
{"Pepin", "Pepin Omacka", success},
{"Honza", "Honza Novak", success},
{"Jenik", "Jenik Poustevnicek", success},
{"Pepan", "Pepan Dupan", failure},
{"Karel", "Karel Pekar", success},
{"Jenik", "Jenik Poustevnicek", failure},
{"Pepanek", "Pepanek Zemlicka", success},
}
for _, v := range data {
t.Logf("INSERT prefix=%v, item=%v, success=%v", v.key, v.value, v.retVal)
if ok := trie.Insert(Prefix(v.key), v.value); ok != v.retVal {
t.Errorf("Unexpected return value, expected=%v, got=%v", v.retVal, ok)
}
}
}
func TestTrie_InsertAndMatchPrefix(t *testing.T) {
trie := NewTrie()
t.Log("INSERT prefix=by week")
trie.Insert(Prefix("by week"), 2)
t.Log("INSERT prefix=by")
trie.Insert(Prefix("by"), 1)
if !trie.Match(Prefix("by")) {
t.Error("MATCH prefix=by, expected=true, got=false")
}
}
func TestTrie_SetGet(t *testing.T) {
trie := NewTrie()
data := []testData{
{"Pepan", "Pepan Zdepan", success},
{"Pepin", "Pepin Omacka", success},
{"Honza", "Honza Novak", success},
{"Jenik", "Jenik Poustevnicek", success},
{"Pepan", "Pepan Dupan", failure},
{"Karel", "Karel Pekar", success},
{"Jenik", "Jenik Poustevnicek", failure},
{"Pepanek", "Pepanek Zemlicka", success},
}
for _, v := range data {
t.Logf("INSERT prefix=%v, item=%v, success=%v", v.key, v.value, v.retVal)
if ok := trie.Insert(Prefix(v.key), v.value); ok != v.retVal {
t.Errorf("Unexpected return value, expected=%v, got=%v", v.retVal, ok)
}
}
for _, v := range data {
t.Logf("SET %q to 10", v.key)
trie.Set(Prefix(v.key), 10)
}
for _, v := range data {
value := trie.Get(Prefix(v.key))
t.Logf("GET %q => %v", v.key, value)
if value.(int) != 10 {
t.Errorf("Unexpected return value, %v != 10", value)
}
}
if value := trie.Get(Prefix("random crap")); value != nil {
t.Errorf("Unexpected return value, %v != <nil>", value)
}
}
func TestTrie_Match(t *testing.T) {
trie := NewTrie()
data := []testData{
{"Pepan", "Pepan Zdepan", success},
{"Pepin", "Pepin Omacka", success},
{"Honza", "Honza Novak", success},
{"Jenik", "Jenik Poustevnicek", success},
{"Pepan", "Pepan Dupan", failure},
{"Karel", "Karel Pekar", success},
{"Jenik", "Jenik Poustevnicek", failure},
{"Pepanek", "Pepanek Zemlicka", success},
}
for _, v := range data {
t.Logf("INSERT prefix=%v, item=%v, success=%v", v.key, v.value, v.retVal)
if ok := trie.Insert(Prefix(v.key), v.value); ok != v.retVal {
t.Errorf("Unexpected return value, expected=%v, got=%v", v.retVal, ok)
}
}
for _, v := range data {
matched := trie.Match(Prefix(v.key))
t.Logf("MATCH %q => %v", v.key, matched)
if !matched {
t.Errorf("Inserted key %q was not matched", v.key)
}
}
if trie.Match(Prefix("random crap")) {
t.Errorf("Key that was not inserted matched: %q", "random crap")
}
}
func TestTrie_MatchFalsePositive(t *testing.T) {
trie := NewTrie()
if ok := trie.Insert(Prefix("A"), 1); !ok {
t.Fatal("INSERT prefix=A, item=1 not ok")
}
resultMatchSubtree := trie.MatchSubtree(Prefix("A extra"))
resultMatch := trie.Match(Prefix("A extra"))
if resultMatchSubtree != false {
t.Error("MatchSubtree returned false positive")
}
if resultMatch != false {
t.Error("Match returned false positive")
}
}
func TestTrie_MatchSubtree(t *testing.T) {
trie := NewTrie()
data := []testData{
{"Pepan", "Pepan Zdepan", success},
{"Pepin", "Pepin Omacka", success},
{"Honza", "Honza Novak", success},
{"Jenik", "Jenik Poustevnicek", success},
{"Pepan", "Pepan Dupan", failure},
{"Karel", "Karel Pekar", success},
{"Jenik", "Jenik Poustevnicek", failure},
{"Pepanek", "Pepanek Zemlicka", success},
}
for _, v := range data {
t.Logf("INSERT prefix=%v, item=%v, success=%v", v.key, v.value, v.retVal)
if ok := trie.Insert(Prefix(v.key), v.value); ok != v.retVal {
t.Errorf("Unexpected return value, expected=%v, got=%v", v.retVal, ok)
}
}
for _, v := range data {
key := Prefix(v.key[:3])
matched := trie.MatchSubtree(key)
t.Logf("MATCH_SUBTREE %q => %v", key, matched)
if !matched {
t.Errorf("Subtree %q was not matched", v.key)
}
}
}
func
|
(t *testing.T) {
trie := NewTrie()
data := []testData{
{"Pepa", 0, success},
{"Pepa Zdepa", 1, success},
{"Pepa Kuchar", 2, success},
{"Honza", 3, success},
{"Jenik", 4, success},
}
for _, v := range data {
t.Logf("INSERT prefix=%v, item=%v, success=%v", v.key, v.value, v.retVal)
if ok := trie.Insert([]byte(v.key), v.value); ok != v.retVal {
t.Fatalf("Unexpected return value, expected=%v, got=%v", v.retVal, ok)
}
}
if err := trie.Visit(func(prefix Prefix, item Item) error {
name := data[item.(int)].key
t.Logf("VISITING prefix=%q, item=%v", prefix, item)
if !strings.HasPrefix(string(prefix), name) {
t.Errorf("Unexpected prefix encountered, %q not a prefix of %q", prefix, name)
}
return nil
}); err != nil {
t.Fatal(err)
}
}
func TestTrie_VisitSkipSubtree(t *testing.T) {
trie := NewTrie()
data := []testData{
{"Pepa", 0, success},
{"Pepa Zdepa", 1, success},
{"Pepa Kuchar", 2, success},
{"Honza", 3, success},
{"Jenik", 4, success},
}
for _, v := range data {
t.Logf("INSERT prefix=%v, item=%v, success=%v", v.key, v.value, v.retVal)
if ok := trie.Insert([]byte(v.key), v.value); ok != v.retVal {
t.Fatalf("Unexpected return value, expected=%v, got=%v", v.retVal, ok)
}
}
if err := trie.Visit(func(prefix Prefix, item Item) error {
t.Logf("VISITING prefix=%q, item=%v", prefix, item)
if item.(int) == 0 {
t.Logf("SKIP %q", prefix)
return SkipSubtree
}
if strings.HasPrefix(string(prefix), "Pepa") {
t.Errorf("Unexpected prefix encountered, %q", prefix)
}
return nil
}); err != nil {
t.Fatal(err)
}
}
func TestTrie_VisitReturnError(t *testing.T) {
trie := NewTrie()
data := []testData{
{"Pepa", 0, success},
{"Pepa Zdepa", 1, success},
{"Pepa Kuchar", 2, success},
{"Honza", 3, success},
{"Jenik", 4, success},
}
for _, v := range data {
t.Logf("INSERT prefix=%v, item=%v, success=%v", v.key, v.value, v.retVal)
if ok := trie.Insert([]byte(v.key), v.value); ok != v.retVal {
t.Fatalf("Unexpected return value, expected=%v, got=%v", v.retVal, ok)
}
}
someErr := errors.New("Something exploded")
if err := trie.Visit(func(prefix Prefix, item Item) error {
t.Logf("VISITING prefix=%q, item=%v", prefix, item)
if item.(int) == 3 {
return someErr
}
if item.(int) != 3 {
t.Errorf("Unexpected prefix encountered, %q", prefix)
}
return nil
}); err != nil && err != someErr {
t.Fatal(err)
}
}
func TestTrie_VisitSubtree(t *testing.T) {
trie := NewTrie()
data := []testData{
{"Pepa", 0, success},
{"Pepa Zdepa", 1, success},
{"Pepa Kuchar", 2, success},
{"Honza", 3, success},
{"Jenik", 4, success},
}
for _, v := range data {
t.Logf("INSERT prefix=%v, item=%v, success=%v", v.key, v.value, v.retVal)
if ok := trie.Insert([]byte(v.key), v.value); ok != v.retVal {
t.Fatalf("Unexpected return value, expected=%v, got=%v", v.retVal, ok)
}
}
var counter int
subtreePrefix := []byte("Pep")
t.Log("VISIT Pep")
if err := trie.VisitSubtree(subtreePrefix, func(prefix Prefix, item Item) error {
t.Logf("VISITING prefix=%q, item=%v", prefix, item)
if !bytes.HasPrefix(prefix, subtreePrefix) {
t.Errorf("Unexpected prefix encountered, %q does not extend %q",
prefix, subtreePrefix)
}
if len(prefix) > len(data[item.(int)].key) {
t.Fatalf("Something is rather fishy here, prefix=%q", prefix)
}
counter++
return nil
}); err != nil {
t.Fatal(err)
}
if counter != 3 {
t.Error("Unexpected number of nodes visited")
}
}
func TestTrie_VisitPrefixes(t *testing.T) {
trie := NewTrie()
data := []testData{
{"P", 0, success},
{"Pe", 1, success},
{"Pep", 2, success},
{"Pepa", 3, success},
{"Pepa Zdepa", 4, success},
{"Pepa Kuchar", 5, success},
{"Honza", 6, success},
{"Jenik", 7, success},
}
for _, v := range data {
t.Logf("INSERT prefix=%v, item=%v, success=%v", v.key, v.value, v.retVal)
if ok := trie.Insert([]byte(v.key), v.value); ok != v.retVal {
t.Fatalf("Unexpected return value, expected=%v, got=%v", v.retVal, ok)
}
}
var counter int
word := []byte("Pepa")
if err := trie.VisitPrefixes(word, func(prefix Prefix, item Item) error {
t.Logf("VISITING prefix=%q, item=%v", prefix, item)
if !bytes.HasPrefix(word, prefix) {
t.Errorf("Unexpected prefix encountered, %q is not a prefix of %q",
prefix, word)
}
counter++
return nil
}); err != nil {
t.Fatal(err)
}
if counter != 4 {
t.Error("Unexpected number of nodes visited")
}
}
func TestParticiaTrie_Delete(t *testing.T) {
trie := NewTrie()
data := []testData{
{"Pepan", "Pepan Zdepan", success},
{"Honza", "Honza Novak", success},
{"Jenik", "Jenik Poustevnicek", success},
}
for _, v := range data {
t.Logf("INSERT prefix=%v, item=%v, success=%v", v.key, v.value, v.retVal)
if ok := trie.Insert([]byte(v.key), v.value); ok != v.retVal {
t.Fatalf("Unexpected return value, expected=%v, got=%v", v.retVal, ok)
}
}
for _, v := range data {
t.Logf("DELETE word=%v, success=%v", v.key, v.retVal)
if ok := trie.Delete([]byte(v.key)); ok != v.retVal {
t.Errorf("Unexpected return value, expected=%v, got=%v", v.retVal, ok)
}
}
}
func TestParticiaTrie_DeleteLeakageSparse(t *testing.T) {
trie := NewTrie()
data := []testData{
{"Pepan", "Pepan Zdepan", success},
{"Honza", "Honza Novak", success},
{"Jenik", "Jenik Poustevnicek", success},
}
oldBytes := heapAllocatedBytes()
for i := 0; i < 10000; i++ {
for _, v := range data {
if ok := trie.Insert([]byte(v.key), v.value); ok != v.retVal {
t.Fatalf("Unexpected return value, expected=%v, got=%v", v.retVal, ok)
}
}
for _, v := range data {
if ok := trie.Delete([]byte(v.key)); ok != v.retVal {
t.Errorf("Unexpected return value, expected=%v, got=%v", v.retVal, ok)
}
}
}
if newBytes := heapAllocatedBytes(); newBytes > oldBytes+overhead {
t.Logf("Size=%d, Total=%d, Trie state:\n%s\n", trie.size(), trie.total(), trie.dump())
t.Errorf("Heap space leak, grew %d bytes (from %d to %d)\n", newBytes-oldBytes, oldBytes, newBytes)
}
}
func TestParticiaTrie_DeleteNonExistent(t *testing.T) {
trie := NewTrie()
insertData := []testData{
{"Pepan", "Pepan Zdepan", success},
{"Honza", "Honza Novak", success},
{"Jenik", "Jenik Poustevnicek", success},
}
deleteData := []testData{
{"Pepan", "Pepan Zdepan", success},
{"Honza", "Honza Novak", success},
{"Pepan", "Pepan Zdepan", failure},
{"Jenik", "Jenik Poustevnicek", success},
{"Honza", "Honza Novak", failure},
}
for _, v := range insertData {
t.Logf("INSERT prefix=%v, item=%v, success=%v", v.key, v.value, v.retVal)
if ok := trie.Insert([]byte(v.key), v.value); ok != v.retVal {
t.Fatalf("Unexpected return value, expected=%v, got=%v", v.retVal, ok)
}
}
for _, v := range deleteData {
t.Logf("DELETE word=%v, success=%v", v.key, v.retVal)
if ok := trie.Delete([]byte(v.key)); ok != v.retVal {
t.Errorf("Unexpected return value, expected=%v, got=%v", v.retVal, ok)
}
}
}
func TestParticiaTrie_DeleteSubtree(t *testing.T) {
trie := NewTrie()
insertData := []testData{
{"P", 0, success},
{"Pe", 1, success},
{"Pep", 2, success},
{"Pepa", 3, success},
{"Pepa Zdepa", 4, success},
{"Pepa Kuchar", 5, success},
{"Honza", 6, success},
{"Jenik", 7, success},
}
deleteData := []testData{
{"Pe", -1, success},
{"Pe", -1, failure},
{"Honzik", -1, failure},
{"Honza", -1, success},
{"Honza", -1, failure},
{"Pep", -1, failure},
{"P", -1, success},
{"Nobody", -1, failure},
{"", -1, success},
}
for _, v := range insertData {
t.Logf("INSERT prefix=%v, item=%v, success=%v", v.key, v.value, v.retVal)
if ok := trie.Insert([]byte(v.key), v.value); ok != v.retVal {
t.Fatalf("Unexpected return value, expected=%v, got=%v", v.retVal, ok)
}
}
for _, v := range deleteData {
t.Logf("DELETE_SUBTREE prefix=%v, success=%v", v.key, v.retVal)
if ok := trie.DeleteSubtree([]byte(v.key)); ok != v.retVal {
t.Errorf("Unexpected return value, expected=%v, got=%v", v.retVal, ok)
}
}
}
/*
func TestTrie_Dump(t *testing.T) {
trie := NewTrie()
data := []testData{
{"Honda", nil, success},
{"Honza", nil, success},
{"Jenik", nil, success},
{"Pepan", nil, success},
{"Pepin", nil, success},
}
for i, v := range data {
if _, ok := trie.Insert([]byte(v.key), v.value); ok != v.retVal {
t.Logf("INSERT %v %v", v.key, v.value)
t.Fatalf("Unexpected return value, expected=%v, got=%v", i, ok)
}
}
dump := `
+--+--+ Hon +--+--+ da
| |
| +--+ za
|
+--+ Jenik
|
+--+ Pep +--+--+ an
|
+--+ in
`
var buf bytes.Buffer
trie.Dump(buf)
if !bytes.Equal(buf.Bytes(), dump) {
t.Logf("DUMP")
t.Fatalf("Unexpected dump generated, expected\n\n%v\ngot\n\n%v", dump, buf.String())
}
}
*/
func TestTrie_compact(t *testing.T) {
trie := NewTrie()
trie.Insert(Prefix("a"), 0)
trie.Insert(Prefix("ab"), 0)
trie.Insert(Prefix("abc"), 0)
trie.Insert(Prefix("abcd"), 0)
trie.Insert(Prefix("abcde"), 0)
trie.Insert(Prefix("abcdef"), 0)
trie.Insert(Prefix("abcdefg"), 0)
trie.Insert(Prefix("abcdefgi"), 0)
trie.Insert(Prefix("abcdefgij"), 0)
trie.Insert(Prefix("abcdefgijk"), 0)
trie.Delete(Prefix("abcdef"))
trie.Delete(Prefix("abcde"))
trie.Delete(Prefix("abcdefg"))
trie.Delete(Prefix("a"))
trie.Delete(Prefix("abc"))
trie.Delete(Prefix("ab"))
trie.Visit(func(prefix Prefix, item Item) error {
// 97 ~~ 'a',
for ch := byte(97); ch <= 107; ch++ {
if c := bytes.Count(prefix, []byte{ch}); c > 1 {
t.Errorf("%q appeared in %q %v times", ch, prefix, c)
}
}
return nil
})
}
func TestTrie_longestCommonPrefixLenght(t *testing.T) {
trie := NewTrie()
trie.prefix = []byte("1234567890")
switch {
case trie.longestCommonPrefixLength([]byte("")) != 0:
t.Fail()
case trie.longestCommonPrefixLength([]byte("12345")) != 5:
t.Fail()
case trie.longestCommonPrefixLength([]byte("123789")) != 3:
t.Fail()
case trie.longestCommonPrefixLength([]byte("12345678901")) != 10:
t.Fail()
}
}
// Examples --------------------------------------------------------------------
func ExampleTrie() {
// Create a new tree.
trie := NewTrie()
// Insert some items.
trie.Insert(Prefix("Pepa Novak"), 1)
trie.Insert(Prefix("Pepa Sindelar"), 2)
trie.Insert(Prefix("Karel Macha"), 3)
trie.Insert(Prefix("Karel Hynek Macha"), 4)
// Just check if some things are present in the tree.
key := Prefix("Pepa Novak")
fmt.Printf("%q present? %v\n", key, trie.Match(key))
key = Prefix("Karel")
fmt.Printf("Anybody called %q here? %v\n", key, trie.MatchSubtree(key))
// Walk the tree.
trie.Visit(printItem)
// "Karel Hynek Macha": 4
// "Karel Macha": 3
// "Pepa Novak": 1
// "Pepa Sindelar": 2
// Walk a subtree.
trie.VisitSubtree(Prefix("Pepa"), printItem)
// "Pepa Novak": 1
// "Pepa Sindelar": 2
// Modify an item, then fetch it from the tree.
trie.Set(Prefix("Karel Hynek Macha"), 10)
key = Prefix("Karel Hynek Macha")
fmt.Printf("%q: %v\n", key, trie.Get(key))
// "Karel Hynek Macha": 10
// Walk prefixes.
prefix := Prefix("Karel Hynek Macha je kouzelnik")
trie.VisitPrefixes(prefix, printItem)
// "Karel Hynek Macha": 10
// Delete some items.
trie.Delete(Prefix("Pepa Novak"))
trie.Delete(Prefix("Karel Macha"))
// Walk again.
trie.Visit(printItem)
// "Karel Hynek Macha": 10
// "Pepa Sindelar": 2
// Delete a subtree.
trie.DeleteSubtree(Prefix("Pepa"))
// Print what is left.
trie.Visit(printItem)
// "Karel Hynek Macha": 10
// Output:
// "Pepa Novak" present? true
// Anybody called "Karel" here? true
// "Karel Hynek Macha": 4
// "Karel Macha": 3
// "Pepa Novak": 1
// "Pepa Sindelar": 2
// "Pepa Novak": 1
// "Pepa Sindelar": 2
// "Karel Hynek Macha": 10
// "Karel Hynek Macha": 10
// "Karel Hynek Macha": 10
// "Pepa Sindelar": 2
// "Karel Hynek Macha": 10
}
// Helpers ---------------------------------------------------------------------
func printItem(prefix Prefix, item Item) error {
fmt.Printf("%q: %v\n", prefix, item)
return nil
}
|
TestTrie_Visit
|
McFee_Functions.py
|
__author__ = 'Chris Jones'
import numpy as np
from scipy.integrate import quad
import heat_xfer as hx
# Values for default parameters
default_rrr = 150
default_I = 200.
default_res_increase = 0.
default_rho_273 = 1.71e-6
linspace_num = 500
def get_kr_ave1(u_l, u_h):
"""
Returns the average k*r value from u_l to u_h using integration
:param u_l:
:param u_h:
:return:
"""
if u_l == u_h:
return 0.
kr_func = lambda u: hx.therm_cond_cu(u, rrr=default_rrr) * hx.resistivity_BG(u)
return (1 / (u_h - u_l)) * quad(kr_func, u_l, u_h)[0]
def get_qn1(u_l, u_h, I): # The heat flow at the cold end
"""
Returns the nth heat flow value in watts using integration
:param u_l:
:param u_h:
:param I:
:return:
"""
return I * np.sqrt(2 * get_kr_ave1(u_l, u_h) * (u_h - u_l))
def get_qps1(u, u_l, I=default_I):
qps = np.zeros(len(u))
for i in range(len(u)):
qps[i] = get_qn1(u_l, u[i], I)
return qps
def get_kr_ave2(u_l, u_h, du, r_increase=default_res_increase):
"""
Returns the average k*r value from u_l to u_h using summation
:param u_l:
:param u_h:
:param du:
:return:
"""
if u_l == u_h:
return 0.
# x = np.arange(u_h, u_l - du, -du)
x = np.linspace(u_h, u_l, linspace_num)
return np.sum(get_kp(x)*get_rp(x, r_increase=r_increase)) / float(len(x))
def get_qn2(u_l, u_h, I, du, r_increase=default_res_increase):
"""
Returns the nth heat flow value in watts using summation
:param u_l: cold end temperature in Kelvin
:type u_l: float
:param u_h: warm end temperature in Kelvin
:type u_h: float
:param I: conductor current
:type I: float
:param du: temperature step size
:type du: float
:returns: heat load seen at cold end of conductor
:rtype: float
"""
return I * np.sqrt(2. * get_kr_ave2(u_l, u_h, du, r_increase=r_increase) * (u_h - u_l))
def get_qps2(u, du, I=default_I):
qps = np.zeros(len(u))
u_max = max(u)
for i in range(len(u)):
qps[i] = get_qn2(u_max, u[i], I, du)
return qps
def
|
(cell_temps, r_increase=default_res_increase):
"""
For a given cell temperature range, return the sum of the k*r products.
Used by 'get_la_ratio()'
:param cell_temps:
:type cell_temps: numpy.ndarray
:return: cumulative sum of k*r values
:rtype: numpy.ndarray
"""
return np.cumsum(get_kp(cell_temps)*get_rp(cell_temps, r_increase=r_increase))
def get_qn3(kr_sum, du, I):
"""
Returns the value of Q from a range. Meant to be used by 'get_la_ratio()'
:param kr_sum:
:param du:
:param I:
:return:
:rtype:
"""
return I * np.sqrt(2. * kr_sum * du)
def get_la_ratio(u, du, I, r_increase=default_res_increase):
"""
Given a temperature range and current, returns the optimized length to area ratio of the conductor.
:param r_increase:
:param u:
:param du:
:param I:
:return:
"""
ct = get_cell_temps(u)
kr = get_kr_cumsum(ct, r_increase=r_increase)
sp = get_sp(ct, r_increase=r_increase)
qs = get_qn3(kr, du, I)
ratio = 0.
for i in range(len(ct) - 1):
ratio += (sp[i] - sp[i+1]) * qs[i]
ratio += (sp[-1] * qs[-1])
return ratio / I ** 2.
def get_kp(u, rrr=default_rrr):
"""
Given a temperature, or array of temperatures, returns the thermal conductivity.
:param u:
:param rrr:
:return:
"""
return hx.therm_cond_cu(u, rrr)
def get_sp(u, rrr=default_rrr, rho273=default_rho_273, r_increase=default_res_increase):
"""
Given a temperature, or an array of temperatures, returns the electrical conductivity.
:param r_increase:
:param u:
:param rrr:
:param rho273:
:return:
"""
return 1. / (hx.resistivity_BG(u, rrr, rho273) + r_increase)
def get_rp(u, rrr=default_rrr, rho273=default_rho_273, r_increase=default_res_increase):
"""
Given a temperature, or an array of temperatures, returns the electrical resistivity.
:param u:
:param rrr:
:param rho273:
:return:
"""
return hx.resistivity_BG(u, rrr, rho273) + r_increase
def get_cell_temps(u):
"""
For a temperature array of length n >= 2, where the values represent temperatures at cell boundaries,
this function returns the average temperature for the cell in an n-1 length array.
:param u:
:return:
"""
temps = np.zeros(len(u)-1)
for i in range(len(temps)):
temps[i] = 0.5 * (u[i] + u[i + 1])
return temps
|
get_kr_cumsum
|
GDataTestHTTPServer.py
|
#!/usr/bin/python
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A simple server for testing the Objective-C GData Framework
This http server is for use by GDataServiceTest.m in testing
both authentication and object retrieval.
Requests to the path /accounts/ClientLogin are assumed to be
for login; other requests are for object retrieval
"""
import string
import cgi
import time
import os
import sys
import re
import mimetypes
import socket
from BaseHTTPServer import BaseHTTPRequestHandler
from BaseHTTPServer import HTTPServer
from optparse import OptionParser
class ServerTimeoutException(Exception):
pass
class
|
(HTTPServer):
"""HTTP server for testing network requests.
This server will throw an exception if it receives no connections for
several minutes. We use this to ensure that the server will be cleaned
up if something goes wrong during the unit testing.
"""
def get_request(self):
self.socket.settimeout(120.0)
result = None
while result is None:
try:
result = self.socket.accept()
except socket.timeout:
raise ServerTimeoutException
result[0].settimeout(None)
return result
class SimpleServer(BaseHTTPRequestHandler):
"""HTTP request handler for testing GData network requests.
This is an implementation of a request handler for BaseHTTPServer,
specifically designed for GData service code usage.
Normal requests for GET/POST/PUT simply retrieve the file from the
supplied path, starting in the current directory. A cookie called
TestCookie is set by the response header, with the value of the filename
requested.
DELETE requests always succeed.
Appending ?status=n results in a failure with status value n.
Paths ending in .auth have the .auth extension stripped, and must have
an authorization header of "GoogleLogin auth=GoodAuthToken" to succeed.
Paths ending in .authsub have the .authsub extension stripped, and must have
an authorization header of "AuthSub token=GoodAuthSubToken" to succeed.
Paths ending in .authwww have the .authwww extension stripped, and must have
an authorization header for GoodWWWUser:GoodWWWPassword to succeed.
Successful results have a Last-Modified header set; if that header's value
("thursday") is supplied in a request's "If-Modified-Since" header, the
result is 304 (Not Modified).
Requests to /accounts/ClientLogin will fail if supplied with a body
containing Passwd=bad. If they contain logintoken and logincaptcha values,
those must be logintoken=CapToken&logincaptch=good to succeed.
"""
def do_GET(self):
self.doAllRequests()
def do_POST(self):
self.doAllRequests()
def do_PUT(self):
self.doAllRequests()
def do_DELETE(self):
self.doAllRequests()
def doAllRequests(self):
# This method handles all expected incoming requests
#
# Requests to path /accounts/ClientLogin are assumed to be for signing in
#
# Other paths are for retrieving a local xml file. An .auth appended
# to an xml file path will require authentication (meaning the Authorization
# header must be present with the value "GoogleLogin auth=GoodAuthToken".)
# Delete commands succeed but return no data.
#
# GData override headers are supported.
#
# Any auth password is valid except "bad", which will fail, and "captcha",
# which will fail unless the authentication request's post string includes
# "logintoken=CapToken&logincaptcha=good"
# We will use a readable default result string since it should never show up
# in output
resultString = "default GDataTestServer result\n";
resultStatus = 0
headerType = "text/plain"
postString = ""
modifiedDate = "thursday" # clients should treat dates as opaque, generally
# auth queries and some GData queries include post data
postLength = int(self.headers.getheader("Content-Length", "0"));
if postLength > 0:
postString = self.rfile.read(postLength)
ifModifiedSince = self.headers.getheader("If-Modified-Since", "");
# retrieve the auth header
authorization = self.headers.getheader("Authorization", "")
# require basic auth if the file path ends with the string ".authwww"
# GoodWWWUser:GoodWWWPassword is base64 R29vZFdXV1VzZXI6R29vZFdXV1Bhc3N3b3Jk
if self.path.endswith(".authwww"):
if authorization != "Basic R29vZFdXV1VzZXI6R29vZFdXV1Bhc3N3b3Jk":
self.send_response(401)
self.send_header('WWW-Authenticate', "Basic realm='testrealm'")
self.send_header('Content-type', 'text/html')
self.end_headers()
return
self.path = self.path[:-8] # remove the .authwww at the end
# require Google auth if the file path ends with the string ".auth"
# or ".authsub"
if self.path.endswith(".auth"):
if authorization != "GoogleLogin auth=GoodAuthToken":
self.send_error(401,"Unauthorized: %s" % self.path)
return
self.path = self.path[:-5] # remove the .auth at the end
if self.path.endswith(".authsub"):
if authorization != "AuthSub token=GoodAuthSubToken":
self.send_error(401,"Unauthorized: %s" % self.path)
return
self.path = self.path[:-8] # remove the .authsub at the end
overrideHeader = self.headers.getheader("X-HTTP-Method-Override", "")
httpCommand = self.command
if httpCommand == "POST" and len(overrideHeader) > 0:
httpCommand = overrideHeader
try:
if self.path.endswith("/accounts/ClientLogin"):
#
# it's a sign-in attempt; it's good unless the password is "bad" or
# "captcha"
#
# use regular expression to find the password
password = ""
searchResult = re.search("(Passwd=)([^&\n]*)", postString)
if searchResult:
password = searchResult.group(2)
if password == "bad":
resultString = "Error=BadAuthentication\n"
resultStatus = 403
elif password == "captcha":
logintoken = ""
logincaptcha = ""
# use regular expressions to find the captcha token and answer
searchResult = re.search("(logintoken=)([^&\n]*)", postString);
if searchResult:
logintoken = searchResult.group(2)
searchResult = re.search("(logincaptcha=)([^&\n]*)", postString);
if searchResult:
logincaptcha = searchResult.group(2)
# if the captcha token is "CapToken" and the answer is "good"
# then it's a valid sign in
if (logintoken == "CapToken") and (logincaptcha == "good"):
resultString = "SID=GoodSID\nLSID=GoodLSID\nAuth=GoodAuthToken\n"
resultStatus = 200
else:
# incorrect captcha token or answer provided
resultString = ("Error=CaptchaRequired\nCaptchaToken=CapToken\n"
"CaptchaUrl=CapUrl\n")
resultStatus = 403
else:
# valid username/password
resultString = "SID=GoodSID\nLSID=GoodLSID\nAuth=GoodAuthToken\n"
resultStatus = 200
elif httpCommand == "DELETE":
#
# it's an object delete; read and return empty data
#
resultString = ""
resultStatus = 200
headerType = "text/plain"
else:
# queries that have something like "?status=456" should fail with the
# status code
searchResult = re.search("(status=)([0-9]+)", self.path)
if searchResult:
status = searchResult.group(2)
self.send_error(int(status),
"Test HTTP server status parameter: %s" % self.path)
return
# queries that have something like "?statusxml=456" should fail with the
# status code and structured XML response
searchResult = re.search("(statusxml=)([0-9]+)", self.path)
if searchResult:
status = searchResult.group(2)
self.send_response(int(status))
self.send_header("Content-type",
"application/vnd.google.gdata.error+xml")
self.end_headers()
resultString = ("<errors xmlns='http://schemas.google.com/g/2005'>"
"<error><domain>GData</domain><code>code_%s</code>"
"<internalReason>forced status error on path %s</internalReason>"
"<extendedHelp>http://help.com</extendedHelp>"
"<sendReport>http://report.com</sendReport></error>"
"</errors>" % (status, self.path))
self.wfile.write(resultString)
return
# if the client gave us back our modified date, then say there's no
# change in the response
if ifModifiedSince == modifiedDate:
self.send_response(304) # Not Modified
return
else:
#
# it's an object fetch; read and return the XML file
#
f = open("." + self.path)
resultString = f.read()
f.close()
resultStatus = 200
fileTypeInfo = mimetypes.guess_type("." + self.path)
headerType = fileTypeInfo[0] # first part of the tuple is mime type
self.send_response(resultStatus)
self.send_header("Content-type", headerType)
self.send_header("Last-Modified", modifiedDate)
# set TestCookie to equal the file name requested
cookieValue = os.path.basename("." + self.path)
self.send_header('Set-Cookie', 'TestCookie=%s' % cookieValue)
self.end_headers()
self.wfile.write(resultString)
except IOError:
self.send_error(404,"File Not Found: %s" % self.path)
def main():
try:
parser = OptionParser()
parser.add_option("-p", "--port", dest="port", help="Port to run server on",
type="int", default="80")
parser.add_option("-r", "--root", dest="root", help="Where to root server",
default=".")
(options, args) = parser.parse_args()
os.chdir(options.root)
server = HTTPTimeoutServer(("127.0.0.1", options.port), SimpleServer)
sys.stdout.write("started GDataTestServer.py...");
sys.stdout.flush();
server.serve_forever()
except KeyboardInterrupt:
print "^C received, shutting down server"
server.socket.close()
except ServerTimeoutException:
print "Too long since the last request, shutting down server"
server.socket.close()
if __name__ == "__main__":
main()
|
HTTPTimeoutServer
|
__init__.py
|
import logging
from .suncg_eval import do_suncg_evaluation
def suncg_evaluation(dataset, predictions, iou_thresh_eval, output_folder, box_only, epoch=None, is_train=None, eval_aug_thickness=None, **_):
|
logger = logging.getLogger("maskrcnn_benchmark.inference")
if box_only:
logger.warning("evaluation with box_only / RPN_Only")
logger.info("performing suncg evaluation")
return do_suncg_evaluation(
dataset=dataset,
predictions=predictions,
iou_thresh_eval=iou_thresh_eval,
output_folder=output_folder,
logger=logger,
epoch=epoch,
is_train = is_train,
eval_aug_thickness = eval_aug_thickness
)
|
|
mod.rs
|
pub mod csv;
pub mod serde;
use rust_decimal::Decimal;
|
#[track_caller]
pub fn assert_are_close(a: Decimal, b: Decimal) {
assert_eq!(
a.round_dp(DEFAULT_DECIMAL_PRECISION),
b.round_dp(DEFAULT_DECIMAL_PRECISION)
);
}
|
const DEFAULT_DECIMAL_PRECISION: u32 = 10;
|
kogitobuild.go
|
// Copyright 2020 Red Hat, Inc. and/or its affiliates
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package steps
import (
"fmt"
"path/filepath"
"github.com/cucumber/godog"
"github.com/kiegroup/kogito-cloud-operator/pkg/apis/app/v1beta1"
"github.com/kiegroup/kogito-cloud-operator/test/config"
"github.com/kiegroup/kogito-cloud-operator/test/framework"
"github.com/kiegroup/kogito-cloud-operator/test/steps/mappers"
"github.com/kiegroup/kogito-cloud-operator/test/types"
bddtypes "github.com/kiegroup/kogito-cloud-operator/test/types"
)
/*
DataTable for KogitoBuild:
| config | native | enabled/disabled |
| build-request | cpu/memory | value |
| build-limit | cpu/memory | value |
*/
func
|
(ctx *godog.ScenarioContext, data *Data) {
// Deploy steps
ctx.Step(`^Build (quarkus|springboot) example service "([^"]*)" with configuration:$`, data.buildExampleServiceWithConfiguration)
ctx.Step(`^Build binary (quarkus|springboot) service "([^"]*)" with configuration:$`, data.buildBinaryServiceWithConfiguration)
ctx.Step(`^Build binary (quarkus|springboot) local example service "([^"]*)" from target folder with configuration:$`, data.buildBinaryLocalExampleServiceFromTargetFolderWithConfiguration)
}
// Build service steps
func (data *Data) buildExampleServiceWithConfiguration(runtimeType, contextDir string, table *godog.Table) error {
buildHolder, err := getKogitoBuildConfiguredStub(data.Namespace, runtimeType, filepath.Base(contextDir), table)
if err != nil {
return err
}
buildHolder.KogitoBuild.Spec.Type = v1beta1.RemoteSourceBuildType
buildHolder.KogitoBuild.Spec.GitSource.URI = config.GetExamplesRepositoryURI()
buildHolder.KogitoBuild.Spec.GitSource.ContextDir = contextDir
if ref := config.GetExamplesRepositoryRef(); len(ref) > 0 {
buildHolder.KogitoBuild.Spec.GitSource.Reference = ref
}
return framework.DeployKogitoBuild(data.Namespace, framework.GetDefaultInstallerType(), buildHolder)
}
func (data *Data) buildBinaryServiceWithConfiguration(runtimeType, serviceName string, table *godog.Table) error {
buildHolder, err := getKogitoBuildConfiguredStub(data.Namespace, runtimeType, serviceName, table)
if err != nil {
return err
}
buildHolder.KogitoBuild.Spec.Type = v1beta1.BinaryBuildType
return framework.DeployKogitoBuild(data.Namespace, framework.GetDefaultInstallerType(), buildHolder)
}
func (data *Data) buildBinaryLocalExampleServiceFromTargetFolderWithConfiguration(runtimeType, serviceName string, table *godog.Table) error {
buildHolder, err := getKogitoBuildConfiguredStub(data.Namespace, runtimeType, serviceName, table)
if err != nil {
return err
}
buildHolder.KogitoBuild.Spec.Type = v1beta1.BinaryBuildType
buildHolder.BuiltBinaryFolder = fmt.Sprintf(`%s/%s/target`, data.KogitoExamplesLocation, serviceName)
return framework.DeployKogitoBuild(data.Namespace, framework.CLIInstallerType, buildHolder)
}
// Misc methods
// getKogitoBuildConfiguredStub Get KogitoBuildHolder initialized from table if provided
func getKogitoBuildConfiguredStub(namespace, runtimeType, serviceName string, table *godog.Table) (buildHolder *types.KogitoBuildHolder, err error) {
kogitoBuild := framework.GetKogitoBuildStub(namespace, runtimeType, serviceName)
kogitoRuntime := framework.GetKogitoRuntimeStub(namespace, runtimeType, serviceName, "")
buildHolder = &bddtypes.KogitoBuildHolder{
KogitoServiceHolder: &bddtypes.KogitoServiceHolder{KogitoService: kogitoRuntime},
KogitoBuild: kogitoBuild,
}
if table != nil {
err = mappers.MapKogitoBuildTable(table, buildHolder)
}
framework.SetupKogitoBuildImageStreams(kogitoBuild)
return buildHolder, err
}
|
registerKogitoBuildSteps
|
base.rs
|
use super::{Phdr32, Phdr64, Segment32, Segment64};
pub(crate) struct Segment {
pub phdr: Phdr,
}
pub(crate) enum Phdr {
Phdr32(Phdr32),
Phdr64(Phdr64),
}
impl Segment {
pub fn as_64bit(&self) -> Segment64 {
Segment64 {
header: self.phdr.as_64bit(),
}
}
pub fn as_32bit(&self) -> Segment32 {
Segment32 {
header: self.phdr.as_32bit(),
}
}
}
impl Phdr {
pub fn as_64bit(&self) -> Phdr64 {
match self {
Self::Phdr64(phdr) => *phdr,
_ => unreachable!(),
}
|
pub fn as_32bit(&self) -> Phdr32 {
match self {
Self::Phdr32(phdr) => *phdr,
_ => unreachable!(),
}
}
}
|
}
|
conf.py
|
# Copyright (c) OpenMMLab. All rights reserved.
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import subprocess
import sys
import pytorch_sphinx_theme
sys.path.insert(0, os.path.abspath('..'))
# -- Project information -----------------------------------------------------
project = 'MMAction2'
copyright = '2020, OpenMMLab'
author = 'MMAction2 Authors'
version_file = '../mmaction/version.py'
def get_version():
|
# The full version, including alpha/beta/rc tags
release = get_version()
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc', 'sphinx.ext.napoleon', 'sphinx.ext.viewcode',
'sphinx_markdown_tables', 'sphinx_copybutton', 'myst_parser'
]
# numpy and torch are required
autodoc_mock_imports = ['mmaction.version', 'PIL']
copybutton_prompt_text = r'>>> |\.\.\. '
copybutton_prompt_is_regexp = True
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# -- Options for HTML output -------------------------------------------------
source_suffix = {'.rst': 'restructuredtext', '.md': 'markdown'}
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'pytorch_sphinx_theme'
html_theme_path = [pytorch_sphinx_theme.get_html_theme_path()]
html_theme_options = {
# 'logo_url': 'https://mmocr.readthedocs.io/en/latest/',
'menu': [
{
'name':
'教程',
'url':
'https://colab.research.google.com/github/'
'open-mmlab/mmocr/blob/main/demo/MMOCR_Tutorial.ipynb'
},
{
'name': 'GitHub',
'url': 'https://github.com/open-mmlab/mmocr'
},
{
'name':
'上游代码库',
'children': [
{
'name': 'MMCV',
'url': 'https://github.com/open-mmlab/mmcv',
'description': '计算机视觉基础库'
},
{
'name': 'MMClassification',
'url': 'https://github.com/open-mmlab/mmclassification',
'description': '图像分类代码库'
},
{
'name': 'MMDetection',
'url': 'https://github.com/open-mmlab/mmdetection',
'description': '物体检测代码库'
},
]
},
{
'name':
'OpenMMLab 各项目',
'children': [
{
'name': 'MMAction2',
'url': 'https://github.com/open-mmlab/mmaction2',
},
{
'name': 'MMClassification',
'url': 'https://github.com/open-mmlab/mmclassification',
},
{
'name': 'MMSegmentation',
'url': 'https://github.com/open-mmlab/mmsegmentation',
},
{
'name': 'MMDetection3D',
'url': 'https://github.com/open-mmlab/mmdetection3d',
},
{
'name': 'MMEditing',
'url': 'https://github.com/open-mmlab/mmediting',
},
{
'name': 'MMDetection3D',
'url': 'https://github.com/open-mmlab/mmdetection3d',
},
{
'name': 'MMPose',
'url': 'https://github.com/open-mmlab/mmpose',
},
{
'name': 'MMTracking',
'url': 'https://github.com/open-mmlab/mmtracking',
},
{
'name': 'MMGeneration',
'url': 'https://github.com/open-mmlab/mmgeneration',
},
{
'name': 'MMOCR',
'url': 'https://github.com/open-mmlab/mmocr',
},
]
},
{
'name':
'OpenMMLab',
'children': [
{
'name': '主页',
'url': 'https://openmmlab.com/'
},
{
'name': 'GitHub',
'url': 'https://github.com/open-mmlab/'
},
]
},
]
}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
html_css_files = ['css/readthedocs.css']
myst_enable_extensions = ['colon_fence']
language = 'zh_CN'
master_doc = 'index'
def builder_inited_handler(app):
subprocess.run(['./merge_docs.sh'])
subprocess.run(['./stat.py'])
def setup(app):
app.connect('builder-inited', builder_inited_handler)
|
with open(version_file, 'r') as f:
exec(compile(f.read(), version_file, 'exec'))
return locals()['__version__']
|
read_vector_use_for.rs
|
fn main() {
let v = vec![1, 2, 3, 4];
for i in &v {
*i = *i + 2;
println!("{}", i);
}
|
}
|
|
diagnose_migration_history_tests.rs
|
use migration_core::{
commands::{
CreateMigrationOutput, DiagnoseMigrationHistoryInput, DiagnoseMigrationHistoryOutput, DriftDiagnostic,
HistoryDiagnostic,
},
migration_api,
};
use migration_engine_tests::test_api::*;
use pretty_assertions::assert_eq;
use user_facing_errors::{migration_engine::ShadowDbCreationError, UserFacingError};
#[test_connector]
fn diagnose_migrations_history_on_an_empty_database_without_migration_returns_nothing(api: TestApi) {
let directory = api.create_migrations_directory();
let output = api.diagnose_migration_history(&directory).send_sync().into_output();
assert!(output.is_empty());
}
#[test_connector]
fn diagnose_migrations_history_after_two_migrations_happy_path(api: TestApi) {
let directory = api.create_migrations_directory();
let dm1 = api.datamodel_with_provider(
r#"
model Cat {
id Int @id
name String
}
"#,
);
api.create_migration("initial", &dm1, &directory).send_sync();
let dm2 = api.datamodel_with_provider(
r#"
model Cat {
id Int @id
name String
fluffiness Float
}
"#,
);
api.create_migration("second-migration", &dm2, &directory).send_sync();
api.apply_migrations(&directory)
.send_sync()
.assert_applied_migrations(&["initial", "second-migration"]);
let output = api.diagnose_migration_history(&directory).send_sync().into_output();
assert!(output.is_empty());
}
#[test_connector(tags(Postgres))]
fn diagnose_migration_history_with_opt_in_to_shadow_database_calculates_drift(api: TestApi) {
let directory = api.create_migrations_directory();
let dm1 = r#"
model Cat {
id Int @id
name String
}
"#;
api.create_migration("initial", dm1, &directory).send_sync();
api.apply_migrations(&directory)
.send_sync()
.assert_applied_migrations(&["initial"]);
let dm2 = r#"
model Cat {
id Int @id
name String
fluffiness Float
}
"#;
api.schema_push_w_datasource(dm2).send();
let DiagnoseMigrationHistoryOutput {
drift,
history,
failed_migration_names,
edited_migration_names,
has_migrations_table,
error_in_unapplied_migration,
} = api
.diagnose_migration_history(&directory)
.opt_in_to_shadow_database(true)
.send_sync()
.into_output();
let rollback = drift.unwrap().unwrap_drift_detected();
let snapshot = expect_test::expect![[r#"
[*] Changed the `Cat` table
[+] Added column `fluffiness`
"#]];
snapshot.assert_eq(&rollback);
assert!(history.is_none());
assert!(failed_migration_names.is_empty());
assert!(edited_migration_names.is_empty());
assert!(has_migrations_table);
assert!(error_in_unapplied_migration.is_none());
}
#[test_connector]
fn diagnose_migration_history_without_opt_in_to_shadow_database_does_not_calculate_drift(api: TestApi) {
let directory = api.create_migrations_directory();
let dm1 = api.datamodel_with_provider(
r#"
model Cat {
id Int @id
name String
}
"#,
);
api.create_migration("initial", &dm1, &directory).send_sync();
api.apply_migrations(&directory)
.send_sync()
.assert_applied_migrations(&["initial"]);
let dm2 = r#"
model Cat {
id Int @id
name String
fluffiness Float
}
"#;
api.schema_push_w_datasource(dm2).send();
let DiagnoseMigrationHistoryOutput {
drift,
history,
failed_migration_names,
edited_migration_names,
has_migrations_table,
error_in_unapplied_migration,
} = api.diagnose_migration_history(&directory).send_sync().into_output();
assert!(drift.is_none());
assert!(history.is_none());
assert!(failed_migration_names.is_empty());
assert!(edited_migration_names.is_empty());
assert!(has_migrations_table);
assert!(error_in_unapplied_migration.is_none());
}
#[test_connector(exclude(Postgres, Mssql))]
fn diagnose_migration_history_calculates_drift_in_presence_of_failed_migrations(api: TestApi) {
let directory = api.create_migrations_directory();
let dm1 = api.datamodel_with_provider(
r#"
model Cat {
id Int @id
name String
}
"#,
);
api.create_migration("01_initial", &dm1, &directory).send_sync();
let dm2 = api.datamodel_with_provider(
r#"
model Cat {
id Int @id
name String
}
model Dog {
id Int @id
fluffiness Float
}
"#,
);
let migration_two = api
.create_migration("02_add_dogs", &dm2, &directory)
.send_sync()
.modify_migration(|migration| {
migration.push_str("\nSELECT YOLO;");
});
let err = api.apply_migrations(&directory).send_unwrap_err().to_string();
assert!(err.contains("yolo") || err.contains("YOLO"), "{}", err);
migration_two.modify_migration(|migration| migration.truncate(migration.len() - "SELECT YOLO;".len()));
let DiagnoseMigrationHistoryOutput {
drift,
history,
failed_migration_names,
edited_migration_names,
has_migrations_table,
error_in_unapplied_migration,
} = api
.diagnose_migration_history(&directory)
.opt_in_to_shadow_database(true)
.send_sync()
.into_output();
let summary = drift.unwrap().unwrap_drift_detected();
assert!(summary.starts_with("\n[+] Added tables"), "{}", summary);
assert!(history.is_none());
assert_eq!(failed_migration_names.len(), 1);
assert_eq!(edited_migration_names.len(), 1);
assert!(has_migrations_table);
assert!(error_in_unapplied_migration.is_none());
}
#[test_connector]
fn diagnose_migrations_history_can_detect_when_the_database_is_behind(api: TestApi) {
let directory = api.create_migrations_directory();
let dm1 = api.datamodel_with_provider(
r#"
model Cat {
id Int @id
name String
}
"#,
);
api.create_migration("initial", &dm1, &directory).send_sync();
api.apply_migrations(&directory)
.send_sync()
.assert_applied_migrations(&["initial"]);
let dm2 = api.datamodel_with_provider(
r#"
model Cat {
id Int @id
name String
fluffiness Float
}
"#,
);
let name = api
.create_migration("second-migration", &dm2, &directory)
.send_sync()
.into_output()
.generated_migration_name
.unwrap();
let DiagnoseMigrationHistoryOutput {
drift,
history,
failed_migration_names,
edited_migration_names,
has_migrations_table,
error_in_unapplied_migration,
} = api.diagnose_migration_history(&directory).send_sync().into_output();
assert!(drift.is_none());
assert!(failed_migration_names.is_empty());
assert!(edited_migration_names.is_empty());
assert_eq!(
history,
Some(HistoryDiagnostic::DatabaseIsBehind {
unapplied_migration_names: vec![name],
})
);
assert!(has_migrations_table);
assert!(error_in_unapplied_migration.is_none());
}
#[test_connector]
fn diagnose_migrations_history_can_detect_when_the_folder_is_behind(api: TestApi) {
let directory = api.create_migrations_directory();
let dm1 = api.datamodel_with_provider(
r#"
model Cat {
id Int @id
name String
}
"#,
);
api.create_migration("initial", &dm1, &directory).send_sync();
let dm2 = api.datamodel_with_provider(
r#"
model Cat {
id Int @id
name String
fluffiness Float
}
"#,
);
let name = api
.create_migration("second-migration", &dm2, &directory)
.send_sync()
.into_output()
.generated_migration_name
.unwrap();
api.apply_migrations(&directory)
.send_sync()
.assert_applied_migrations(&["initial", "second-migration"]);
let second_migration_folder_path = directory.path().join(&name);
std::fs::remove_dir_all(&second_migration_folder_path).unwrap();
let DiagnoseMigrationHistoryOutput {
drift,
history,
failed_migration_names,
edited_migration_names,
has_migrations_table,
error_in_unapplied_migration,
} = api
.diagnose_migration_history(&directory)
.opt_in_to_shadow_database(true)
.send_sync()
.into_output();
assert!(failed_migration_names.is_empty());
assert!(edited_migration_names.is_empty());
assert!(matches!(drift, Some(DriftDiagnostic::DriftDetected { summary: _ })));
assert_eq!(
history,
Some(HistoryDiagnostic::MigrationsDirectoryIsBehind {
unpersisted_migration_names: vec![name],
})
);
assert!(has_migrations_table);
assert!(error_in_unapplied_migration.is_none());
}
#[test_connector]
fn diagnose_migrations_history_can_detect_when_history_diverges(api: TestApi) {
let directory = api.create_migrations_directory();
let dm1 = api.datamodel_with_provider(
r#"
model Cat {
id Int @id
name String
}
"#,
);
let first_migration_name = api
.create_migration("1-initial", &dm1, &directory)
.send_sync()
.into_output()
.generated_migration_name
.unwrap();
let dm2 = api.datamodel_with_provider(
r#"
model Cat {
id Int @id
name String
fluffiness Float
}
"#,
);
let deleted_migration_name = api
.create_migration("2-second-migration", &dm2, &directory)
.send_sync()
.into_output()
.generated_migration_name
.unwrap();
api.apply_migrations(&directory)
.send_sync()
.assert_applied_migrations(&["1-initial", "2-second-migration"]);
let second_migration_folder_path = directory.path().join(&deleted_migration_name);
std::fs::remove_dir_all(&second_migration_folder_path).unwrap();
let dm3 = api.datamodel_with_provider(
r#"
model Dog {
id Int @id
name String
fluffiness Float
}
"#,
);
let unapplied_migration_name = api
.create_migration("3-create-dog", &dm3, &directory)
.draft(true)
.send_sync()
.assert_migration_directories_count(2)
.into_output()
.generated_migration_name
.unwrap();
let DiagnoseMigrationHistoryOutput {
history,
drift,
failed_migration_names,
edited_migration_names,
has_migrations_table,
error_in_unapplied_migration,
} = api
.diagnose_migration_history(&directory)
.opt_in_to_shadow_database(true)
.send_sync()
.into_output();
assert!(failed_migration_names.is_empty());
assert!(edited_migration_names.is_empty());
assert!(matches!(drift, Some(DriftDiagnostic::DriftDetected { summary: _ })));
assert_eq!(
history,
Some(HistoryDiagnostic::HistoriesDiverge {
unapplied_migration_names: vec![unapplied_migration_name],
unpersisted_migration_names: vec![deleted_migration_name],
last_common_migration_name: Some(first_migration_name),
})
);
assert!(has_migrations_table);
assert!(error_in_unapplied_migration.is_none());
}
#[test_connector]
fn diagnose_migrations_history_can_detect_edited_migrations(api: TestApi) {
let directory = api.create_migrations_directory();
let dm1 = api.datamodel_with_provider(
r#"
model Cat {
id Int @id
name String
}
"#,
);
let initial_assertions = api.create_migration("initial", &dm1, &directory).send_sync();
let dm2 = api.datamodel_with_provider(
r#"
model Cat {
id Int @id
name String
fluffiness Float
}
"#,
);
api.create_migration("second-migration", &dm2, &directory).send_sync();
api.apply_migrations(&directory)
.send_sync()
.assert_applied_migrations(&["initial", "second-migration"]);
let initial_migration_name = initial_assertions
.modify_migration(|script| {
std::mem::swap(script, &mut format!("/* test */\n{}", script));
})
.into_output()
.generated_migration_name
.unwrap();
let DiagnoseMigrationHistoryOutput {
drift,
history,
edited_migration_names,
failed_migration_names,
has_migrations_table,
error_in_unapplied_migration,
} = api.diagnose_migration_history(&directory).send_sync().into_output();
assert!(drift.is_none());
assert!(history.is_none());
assert!(failed_migration_names.is_empty());
assert_eq!(edited_migration_names, &[initial_migration_name]);
assert!(has_migrations_table);
assert!(error_in_unapplied_migration.is_none());
}
#[test_connector]
fn diagnose_migrations_history_reports_migrations_failing_to_apply_cleanly(api: TestApi) {
let directory = api.create_migrations_directory();
let dm1 = api.datamodel_with_provider(
r#"
model Cat {
id Int @id
name String
}
"#,
);
let initial_assertions = api.create_migration("initial", &dm1, &directory).send_sync();
let dm2 = api.datamodel_with_provider(
r#"
model Cat {
id Int @id
name String
fluffiness Float
}
"#,
);
api.create_migration("second-migration", &dm2, &directory).send_sync();
api.apply_migrations(&directory)
.send_sync()
.assert_applied_migrations(&["initial", "second-migration"]);
let initial_migration_name = initial_assertions
.modify_migration(|script| {
script.push_str("SELECT YOLO;\n");
})
.into_output()
.generated_migration_name
.unwrap();
let DiagnoseMigrationHistoryOutput {
failed_migration_names,
edited_migration_names,
history,
drift,
has_migrations_table,
error_in_unapplied_migration,
} = api
.diagnose_migration_history(&directory)
.opt_in_to_shadow_database(true)
.send_sync()
.into_output();
assert!(has_migrations_table);
assert_eq!(edited_migration_names, &[initial_migration_name.as_str()]);
assert!(failed_migration_names.is_empty());
assert_eq!(history, None);
assert!(error_in_unapplied_migration.is_none());
match drift {
Some(DriftDiagnostic::MigrationFailedToApply { error }) => {
let known_error = error.to_user_facing().unwrap_known();
assert_eq!(
known_error.error_code,
user_facing_errors::migration_engine::MigrationDoesNotApplyCleanly::ERROR_CODE
);
assert_eq!(known_error.meta["migration_name"], initial_migration_name.as_str());
assert!(
known_error.message.contains("yolo")
|| known_error.message.contains("YOLO")
|| known_error.message.contains("(not available)")
);
}
_ => panic!("assertion failed"),
}
}
#[test_connector]
fn diagnose_migrations_history_with_a_nonexistent_migrations_directory_works(api: TestApi) {
let directory = api.create_migrations_directory();
std::fs::remove_dir(directory.path()).unwrap();
let DiagnoseMigrationHistoryOutput {
drift,
history,
edited_migration_names,
failed_migration_names,
has_migrations_table,
error_in_unapplied_migration,
} = api.diagnose_migration_history(&directory).send_sync().into_output();
assert!(drift.is_none());
assert!(history.is_none());
assert!(failed_migration_names.is_empty());
assert!(edited_migration_names.is_empty());
assert!(!has_migrations_table);
assert!(error_in_unapplied_migration.is_none());
}
#[test_connector]
fn dmh_with_a_failed_migration(api: TestApi) {
let migrations_directory = api.create_migrations_directory();
let dm = api.datamodel_with_provider(
r#"
model catsu {
id Int @id
}
"#,
);
let CreateMigrationOutput {
generated_migration_name,
} = api
.create_migration("01-init", &dm, &migrations_directory)
.send_sync()
.assert_migration_directories_count(1)
.modify_migration(|migration| {
migration.clear();
migration.push_str("CREATE_BROKEN");
})
.into_output();
let err = api
.apply_migrations(&migrations_directory)
.send_unwrap_err()
.to_string();
if api.is_mssql() {
assert!(err.contains("Could not find stored procedure"), "{}", err)
}
let DiagnoseMigrationHistoryOutput {
drift,
history,
failed_migration_names,
edited_migration_names,
has_migrations_table,
error_in_unapplied_migration,
} = api
.diagnose_migration_history(&migrations_directory)
.opt_in_to_shadow_database(true)
.send_sync()
.into_output();
assert!(drift.is_none());
assert!(history.is_none());
assert!(edited_migration_names.is_empty());
assert!(has_migrations_table);
assert_eq!(failed_migration_names, &[generated_migration_name.unwrap()]);
let error_in_unapplied_migration = error_in_unapplied_migration
.expect("No error in unapplied migrations, but we expected one.")
.to_user_facing();
let message = error_in_unapplied_migration.message().to_owned();
assert!(
message.contains("01-init` failed to apply cleanly to the shadow database."),
"{}",
message,
);
assert_eq!(
error_in_unapplied_migration.unwrap_known().error_code,
user_facing_errors::migration_engine::MigrationDoesNotApplyCleanly::ERROR_CODE,
);
}
#[test_connector]
fn dmh_with_an_invalid_unapplied_migration_should_report_it(api: TestApi) {
let directory = api.create_migrations_directory();
let dm1 = api.datamodel_with_provider(
r#"
model Cat {
id Int @id
name String
}
"#,
);
api.create_migration("initial", &dm1, &directory).send_sync();
api.apply_migrations(&directory)
.send_sync()
.assert_applied_migrations(&["initial"]);
let dm2 = api.datamodel_with_provider(
r#"
model Cat {
id Int @id
name String
fluffiness Float
}
"#,
);
let CreateMigrationOutput {
generated_migration_name,
} = api
.create_migration("second-migration", &dm2, &directory)
.send_sync()
.modify_migration(|script| {
*script = "CREATE BROKEN".into();
})
.into_output();
let DiagnoseMigrationHistoryOutput {
failed_migration_names,
edited_migration_names,
history,
drift,
has_migrations_table,
error_in_unapplied_migration,
} = api
.diagnose_migration_history(&directory)
.opt_in_to_shadow_database(true)
.send_sync()
.into_output();
assert!(has_migrations_table);
assert!(edited_migration_names.is_empty());
assert!(failed_migration_names.is_empty());
assert!(
matches!(history, Some(HistoryDiagnostic::DatabaseIsBehind { unapplied_migration_names: names }) if names == [generated_migration_name.unwrap()])
);
assert!(drift.is_none());
let error_in_unapplied_migration = error_in_unapplied_migration
.expect("No error in unapplied migrations, but we expected one.")
.to_user_facing();
let message = error_in_unapplied_migration.message().to_owned();
assert!(
message.contains("_second-migration` failed to apply cleanly to the shadow database."),
"{}",
message,
);
assert_eq!(
error_in_unapplied_migration.unwrap_known().error_code,
user_facing_errors::migration_engine::MigrationDoesNotApplyCleanly::ERROR_CODE,
);
}
#[test_connector(tags(Postgres))]
fn drift_can_be_detected_without_migrations_table(api: TestApi) {
let directory = api.create_migrations_directory();
api.raw_cmd("CREATE TABLE \"Cat\" (\nid SERIAL PRIMARY KEY\n);");
let dm1 = api.datamodel_with_provider(
r#"
model Cat {
id Int @id @default(autoincrement())
}
"#,
);
api.create_migration("initial", &dm1, &directory).send_sync();
let DiagnoseMigrationHistoryOutput {
drift,
history,
edited_migration_names,
failed_migration_names,
has_migrations_table,
error_in_unapplied_migration,
} = api
.diagnose_migration_history(&directory)
.opt_in_to_shadow_database(true)
.send_sync()
.into_output();
assert!(matches!(drift, Some(DriftDiagnostic::DriftDetected { summary: _ })));
assert!(
matches!(history, Some(HistoryDiagnostic::DatabaseIsBehind { unapplied_migration_names: migs }) if migs.len() == 1)
);
assert!(failed_migration_names.is_empty());
assert!(edited_migration_names.is_empty());
assert!(!has_migrations_table);
assert!(error_in_unapplied_migration.is_none());
}
#[test_connector(tags(Mysql8), exclude(Vitess))]
fn shadow_database_creation_error_is_special_cased_mysql(api: TestApi) {
let directory = api.create_migrations_directory();
let dm1 = api.datamodel_with_provider(
r#"
model Cat {
id Int @id @default(autoincrement())
}
"#,
);
api.create_migration("initial", &dm1, &directory).send_sync();
api.raw_cmd(&format!(
"
DROP USER IF EXISTS 'prismashadowdbtestuser2';
CREATE USER 'prismashadowdbtestuser2' IDENTIFIED by '1234batman';
GRANT ALL PRIVILEGES ON {}.* TO 'prismashadowdbtestuser2';
",
api.connection_info().dbname().unwrap(),
));
let datamodel = format!(
r#"
datasource db {{
provider = "mysql"
url = "mysql://prismashadowdbtestuser2:1234batman@{dbhost}:{dbport}/{dbname}"
}}
"#,
dbhost = api.connection_info().host(),
dbname = api.connection_info().dbname().unwrap(),
dbport = api.connection_info().port().unwrap_or(3306),
);
let migration_api = migration_api(&datamodel).unwrap();
let output = api
.block_on(
migration_api.diagnose_migration_history(&DiagnoseMigrationHistoryInput {
migrations_directory_path: directory.path().as_os_str().to_string_lossy().into_owned(),
opt_in_to_shadow_database: true,
}),
)
.unwrap();
assert!(
matches!(output.drift, Some(DriftDiagnostic::MigrationFailedToApply { error }) if error.to_user_facing().as_known().unwrap().error_code == ShadowDbCreationError::ERROR_CODE)
);
}
#[test_connector(tags(Postgres12))]
fn shadow_database_creation_error_is_special_cased_postgres(api: TestApi) {
let directory = api.create_migrations_directory();
let dm1 = api.datamodel_with_provider(
r#"
model Cat {
id Int @id @default(autoincrement())
}
"#,
);
api.create_migration("initial", &dm1, &directory).send_sync();
api.raw_cmd(
"
DROP USER IF EXISTS prismashadowdbtestuser2;
CREATE USER prismashadowdbtestuser2 PASSWORD '1234batman' LOGIN;
",
);
let datamodel = format!(
r#"
datasource db {{
provider = "postgresql"
url = "postgresql://prismashadowdbtestuser2:1234batman@{dbhost}:{dbport}/{dbname}"
}}
"#,
dbhost = api.connection_info().host(),
dbname = api.connection_info().dbname().unwrap(),
dbport = api.connection_info().port().unwrap_or(5432),
);
let output = api
.block_on(async {
migration_api(&datamodel)
.unwrap()
.diagnose_migration_history(&DiagnoseMigrationHistoryInput {
migrations_directory_path: directory.path().as_os_str().to_string_lossy().into_owned(),
opt_in_to_shadow_database: true,
})
.await
})
.unwrap();
assert!(
matches!(output.drift, Some(DriftDiagnostic::MigrationFailedToApply { error }) if error.to_user_facing().as_known().unwrap().error_code == ShadowDbCreationError::ERROR_CODE)
);
}
#[test_connector(tags(Mssql2019))]
fn shadow_database_creation_error_is_special_cased_mssql(api: TestApi) {
let directory = api.create_migrations_directory();
let dm1 = api.datamodel_with_provider(
r#"
model Cat {
id Int @id @default(autoincrement())
}
"#,
);
api.create_migration("initial", &dm1, &directory).send_sync();
api.raw_cmd(
"
BEGIN TRY
CREATE LOGIN prismashadowdbtestuser WITH PASSWORD = '1234batmanZ';
GRANT SELECT TO prismashadowdbuser;
END TRY
BEGIN CATCH
END CATCH;
",
);
let datamodel = format!(
r#"
datasource db {{
provider = "sqlserver"
url = "sqlserver://{dbhost}:{dbport};database={dbname};user=prismashadowdbtestuser;password=1234batmanZ;trustservercertificate=true"
}}
"#,
dbhost = api.connection_info().host(),
dbname = api.connection_info().dbname().unwrap(),
dbport = api.connection_info().port().unwrap(),
);
let mut tries = 0;
let migration_api = loop {
if tries > 5 {
panic!("Failed to connect to mssql more than five times.");
}
let result = migration_api(&datamodel);
match result {
Ok(api) => break api,
Err(err) => {
eprintln!("got err, sleeping\nerr:{:?}", err);
tries += 1;
std::thread::sleep(std::time::Duration::from_millis(200));
}
}
};
let output = api
.block_on(
migration_api.diagnose_migration_history(&DiagnoseMigrationHistoryInput {
migrations_directory_path: directory.path().as_os_str().to_string_lossy().into_owned(),
opt_in_to_shadow_database: true,
}),
)
.unwrap();
assert!(
matches!(output.drift, Some(DriftDiagnostic::MigrationFailedToApply { error }) if error.to_user_facing().as_known().unwrap().error_code == ShadowDbCreationError::ERROR_CODE)
);
}
#[test_connector(tags(Sqlite))]
fn empty_migration_directories_should_cause_known_errors(api: TestApi) {
let migrations_directory = api.create_migrations_directory();
let dm = api.datamodel_with_provider(
r#"
model Cat {
id Int @id
hasBox Boolean
}
"#,
);
let output = api
.create_migration("01init", &dm, &migrations_directory)
.send_sync()
.into_output();
api.apply_migrations(&migrations_directory)
.send_sync()
.assert_applied_migrations(&["01init"]);
let dirname = output.generated_migration_name.unwrap();
let dirpath = migrations_directory.path().join(dirname);
assert!(dirpath.exists());
|
assert!(filepath.exists());
std::fs::remove_file(&filepath).unwrap();
let err = api
.diagnose_migration_history(&migrations_directory)
.send_unwrap_err()
.to_user_facing()
.unwrap_known();
assert_eq!(
err.error_code,
user_facing_errors::migration_engine::MigrationFileNotFound::ERROR_CODE
);
assert_eq!(
err.meta,
serde_json::json!({ "migration_file_path": filepath.to_string_lossy(), })
);
}
#[test_connector]
fn indexes_on_same_columns_with_different_names_should_work(api: TestApi) {
let directory = api.create_migrations_directory();
let dm = api.datamodel_with_provider(
r#"
model a {
users_id Int
roles_id Int
@@id([users_id, roles_id])
@@unique([users_id, roles_id], name: "unique_constraint")
@@index([users_id, roles_id], name: "users_has_roles.users_id_roles_id_index")
@@index([users_id, roles_id], name: "users_id_with_roles_id_index")
}
"#,
);
api.create_migration("initial", &dm, &directory).send_sync();
api.apply_migrations(&directory)
.send_sync()
.assert_applied_migrations(&["initial"]);
let output = api
.diagnose_migration_history(&directory)
.opt_in_to_shadow_database(true)
.send_sync()
.into_output();
assert!(output.drift.is_none());
}
#[test_connector(tags(Postgres))]
fn default_dbgenerated_should_not_cause_drift(api: TestApi) {
let migrations_directory = api.create_migrations_directory();
let dm = api.datamodel_with_provider(
r#"
model A {
id String @id @default(dbgenerated("(now())::TEXT"))
}
"#,
);
api.create_migration("01init", &dm, &migrations_directory).send_sync();
api.apply_migrations(&migrations_directory)
.send_sync()
.assert_applied_migrations(&["01init"]);
let output = api
.diagnose_migration_history(&migrations_directory)
.opt_in_to_shadow_database(true)
.send_sync()
.into_output();
assert!(output.drift.is_none());
}
#[test_connector(tags(Postgres))]
fn default_uuid_should_not_cause_drift(api: TestApi) {
let migrations_directory = api.create_migrations_directory();
let dm = api.datamodel_with_provider(
r#"
model A {
id String @id @db.Uuid
uuid String @db.Uuid @default("00000000-0000-0000-0016-000000000004")
}
"#,
);
api.create_migration("01init", &dm, &migrations_directory).send_sync();
api.apply_migrations(&migrations_directory)
.send_sync()
.assert_applied_migrations(&["01init"]);
let output = api
.diagnose_migration_history(&migrations_directory)
.opt_in_to_shadow_database(true)
.send_sync()
.into_output();
assert!(output.drift.is_none());
}
|
let filepath = dirpath.join("migration.sql");
|
shared.rs
|
use crate::task::{waker_ref, ArcWake};
use futures_core::future::{FusedFuture, Future};
use futures_core::task::{Context, Poll, Waker};
use slab::Slab;
use std::cell::UnsafeCell;
use std::fmt;
use std::pin::Pin;
use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering::{Acquire, SeqCst};
use std::sync::{Arc, Mutex};
/// Future for the [`shared`](super::FutureExt::shared) method.
#[must_use = "futures do nothing unless you `.await` or poll them"]
pub struct Shared<Fut: Future> {
inner: Option<Arc<Inner<Fut>>>,
waker_key: usize,
}
struct Inner<Fut: Future> {
future_or_output: UnsafeCell<FutureOrOutput<Fut>>,
notifier: Arc<Notifier>,
}
struct Notifier {
state: AtomicUsize,
wakers: Mutex<Option<Slab<Option<Waker>>>>,
}
// The future itself is polled behind the `Arc`, so it won't be moved
// when `Shared` is moved.
impl<Fut: Future> Unpin for Shared<Fut> {}
impl<Fut: Future> fmt::Debug for Shared<Fut> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("Shared")
.field("inner", &self.inner)
.field("waker_key", &self.waker_key)
.finish()
}
}
impl<Fut: Future> fmt::Debug for Inner<Fut> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("Inner").finish()
}
}
enum FutureOrOutput<Fut: Future> {
Future(Fut),
Output(Fut::Output),
}
unsafe impl<Fut> Send for Inner<Fut>
where
Fut: Future + Send,
Fut::Output: Send + Sync,
{
}
unsafe impl<Fut> Sync for Inner<Fut>
where
Fut: Future + Send,
Fut::Output: Send + Sync,
{
}
const IDLE: usize = 0;
const POLLING: usize = 1;
const COMPLETE: usize = 2;
const POISONED: usize = 3;
const NULL_WAKER_KEY: usize = usize::max_value();
impl<Fut: Future> Shared<Fut> {
pub(super) fn new(future: Fut) -> Shared<Fut> {
let inner = Inner {
future_or_output: UnsafeCell::new(FutureOrOutput::Future(future)),
notifier: Arc::new(Notifier {
state: AtomicUsize::new(IDLE),
wakers: Mutex::new(Some(Slab::new())),
}),
};
Shared {
inner: Some(Arc::new(inner)),
waker_key: NULL_WAKER_KEY,
}
|
impl<Fut> Shared<Fut>
where
Fut: Future,
Fut::Output: Clone,
{
/// Returns [`Some`] containing a reference to this [`Shared`]'s output if
/// it has already been computed by a clone or [`None`] if it hasn't been
/// computed yet or this [`Shared`] already returned its output from
/// [`poll`](Future::poll).
pub fn peek(&self) -> Option<&Fut::Output> {
if let Some(inner) = self.inner.as_ref() {
match inner.notifier.state.load(SeqCst) {
COMPLETE => unsafe { return Some(inner.output()) },
POISONED => panic!("inner future panicked during poll"),
_ => {}
}
}
None
}
}
impl<Fut> Inner<Fut>
where
Fut: Future,
Fut::Output: Clone,
{
/// Safety: callers must first ensure that `self.inner.state`
/// is `COMPLETE`
unsafe fn output(&self) -> &Fut::Output {
match &*self.future_or_output.get() {
FutureOrOutput::Output(ref item) => &item,
FutureOrOutput::Future(_) => unreachable!(),
}
}
/// Registers the current task to receive a wakeup when we are awoken.
fn record_waker(&self, waker_key: &mut usize, cx: &mut Context<'_>) {
let mut wakers_guard = self.notifier.wakers.lock().unwrap();
let wakers = match wakers_guard.as_mut() {
Some(wakers) => wakers,
None => return,
};
let new_waker = cx.waker();
if *waker_key == NULL_WAKER_KEY {
*waker_key = wakers.insert(Some(new_waker.clone()));
} else {
match wakers[*waker_key] {
Some(ref old_waker) if new_waker.will_wake(old_waker) => {}
// Could use clone_from here, but Waker doesn't specialize it.
ref mut slot => *slot = Some(new_waker.clone()),
}
}
debug_assert!(*waker_key != NULL_WAKER_KEY);
}
/// Safety: callers must first ensure that `inner.state`
/// is `COMPLETE`
unsafe fn take_or_clone_output(self: Arc<Self>) -> Fut::Output {
match Arc::try_unwrap(self) {
Ok(inner) => match inner.future_or_output.into_inner() {
FutureOrOutput::Output(item) => item,
FutureOrOutput::Future(_) => unreachable!(),
},
Err(inner) => inner.output().clone(),
}
}
}
impl<Fut> FusedFuture for Shared<Fut>
where
Fut: Future,
Fut::Output: Clone,
{
fn is_terminated(&self) -> bool {
self.inner.is_none()
}
}
impl<Fut> Future for Shared<Fut>
where
Fut: Future,
Fut::Output: Clone,
{
type Output = Fut::Output;
fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
let this = &mut *self;
let inner = this
.inner
.take()
.expect("Shared future polled again after completion");
// Fast path for when the wrapped future has already completed
if inner.notifier.state.load(Acquire) == COMPLETE {
// Safety: We're in the COMPLETE state
return unsafe { Poll::Ready(inner.take_or_clone_output()) };
}
inner.record_waker(&mut this.waker_key, cx);
match inner.notifier.state.compare_and_swap(IDLE, POLLING, SeqCst) {
IDLE => {
// Lock acquired, fall through
}
POLLING => {
// Another task is currently polling, at this point we just want
// to ensure that the waker for this task is registered
this.inner = Some(inner);
return Poll::Pending;
}
COMPLETE => {
// Safety: We're in the COMPLETE state
return unsafe { Poll::Ready(inner.take_or_clone_output()) };
}
POISONED => panic!("inner future panicked during poll"),
_ => unreachable!(),
}
let waker = waker_ref(&inner.notifier);
let mut cx = Context::from_waker(&waker);
struct Reset<'a>(&'a AtomicUsize);
impl Drop for Reset<'_> {
fn drop(&mut self) {
use std::thread;
if thread::panicking() {
self.0.store(POISONED, SeqCst);
}
}
}
let _reset = Reset(&inner.notifier.state);
let output = {
let future = unsafe {
match &mut *inner.future_or_output.get() {
FutureOrOutput::Future(fut) => Pin::new_unchecked(fut),
_ => unreachable!(),
}
};
match future.poll(&mut cx) {
Poll::Pending => {
match inner.notifier.state.compare_and_swap(POLLING, IDLE, SeqCst) {
POLLING => {
// Success
drop(_reset);
this.inner = Some(inner);
return Poll::Pending;
}
_ => unreachable!(),
}
}
Poll::Ready(output) => output,
}
};
unsafe {
*inner.future_or_output.get() = FutureOrOutput::Output(output);
}
inner.notifier.state.store(COMPLETE, SeqCst);
// Wake all tasks and drop the slab
let mut wakers_guard = inner.notifier.wakers.lock().unwrap();
let mut wakers = wakers_guard.take().unwrap();
for opt_waker in wakers.drain() {
if let Some(waker) = opt_waker {
waker.wake();
}
}
drop(_reset); // Make borrow checker happy
drop(wakers_guard);
// Safety: We're in the COMPLETE state
unsafe { Poll::Ready(inner.take_or_clone_output()) }
}
}
impl<Fut> Clone for Shared<Fut>
where
Fut: Future,
{
fn clone(&self) -> Self {
Shared {
inner: self.inner.clone(),
waker_key: NULL_WAKER_KEY,
}
}
}
impl<Fut> Drop for Shared<Fut>
where
Fut: Future,
{
fn drop(&mut self) {
if self.waker_key != NULL_WAKER_KEY {
if let Some(ref inner) = self.inner {
if let Ok(mut wakers) = inner.notifier.wakers.lock() {
if let Some(wakers) = wakers.as_mut() {
wakers.remove(self.waker_key);
}
}
}
}
}
}
impl ArcWake for Notifier {
fn wake_by_ref(arc_self: &Arc<Self>) {
let wakers = &mut *arc_self.wakers.lock().unwrap();
if let Some(wakers) = wakers.as_mut() {
for (_key, opt_waker) in wakers {
if let Some(waker) = opt_waker.take() {
waker.wake();
}
}
}
}
}
|
}
}
|
chains.ts
|
import { Chain } from '../types'
type ChainName =
| 'arbitrumOne'
| 'arbitrumRinkeby'
| 'goerli'
| 'hardhat'
| 'kovan'
| 'localhost'
| 'mainnet'
| 'optimisticEthereum'
| 'optimisticKovan'
| 'polygonMainnet'
| 'polygonTestnetMumbai'
| 'rinkeby'
| 'ropsten'
export const chain: Record<ChainName, Chain> = {
mainnet: {
id: 1,
name: 'Mainnet',
nativeCurrency: { name: 'Ether', symbol: 'ETH', decimals: 18 },
rpcUrls: ['https://mainnet.infura.io/v3'],
blockExplorers: [
{
name: 'Etherscan',
url: 'https://etherscan.io',
},
],
},
ropsten: {
id: 3,
name: 'Ropsten',
nativeCurrency: { name: 'Ropsten Ether', symbol: 'ropETH', decimals: 18 },
rpcUrls: ['https://ropsten.infura.io/v3'],
blockExplorers: [
{
name: 'Etherscan',
url: 'https://ropsten.etherscan.io',
},
],
testnet: true,
},
rinkeby: {
id: 4,
name: 'Rinkeby',
nativeCurrency: { name: 'Rinkeby Ether', symbol: 'rETH', decimals: 18 },
rpcUrls: ['https://rinkeby.infura.io/v3'],
blockExplorers: [
{
name: 'Etherscan',
url: 'https://rinkeby.etherscan.io',
},
],
testnet: true,
},
goerli: {
id: 5,
name: 'Goerli',
nativeCurrency: { name: 'Goerli Ether', symbol: 'gETH', decimals: 18 },
rpcUrls: ['https://rinkeby.infura.io/v3'],
blockExplorers: [
{
name: 'Etherscan',
url: 'https://rinkeby.etherscan.io',
},
],
testnet: true,
},
kovan: {
id: 42,
name: 'Kovan',
nativeCurrency: { name: 'Kovan Ether', symbol: 'kETH', decimals: 18 },
rpcUrls: ['https://kovan.infura.io/v3'],
blockExplorers: [
{
name: 'Etherscan',
url: 'https://kovan.etherscan.io',
},
],
testnet: true,
},
optimisticEthereum: {
id: 10,
name: 'Optimistic Ethereum',
nativeCurrency: { name: 'Optimistic Ether', symbol: 'OETH', decimals: 18 },
rpcUrls: ['https://mainnet.optimism.io'],
blockExplorers: [
{
name: 'Etherscan',
url: 'https://optimistic.etherscan.io',
},
],
},
optimisticKovan: {
id: 69,
name: 'Optimistic Kovan',
nativeCurrency: {
name: 'Kovan Ether',
symbol: 'KOR',
decimals: 18,
},
rpcUrls: ['https://kovan.optimism.io'],
blockExplorers: [
{
name: 'Etherscan',
url: 'https://optimistic.etherscan.io',
},
],
testnet: true,
},
polygonMainnet: {
id: 137,
name: 'Polygon Mainnet',
nativeCurrency: { name: 'Matic', symbol: 'MATIC', decimals: 18 },
rpcUrls: [
'https://polygon-rpc.com',
'https://rpc-mainnet.matic.network',
'https://matic-mainnet.chainstacklabs.com',
'https://rpc-mainnet.maticvigil.com',
'https://rpc-mainnet.matic.quiknode.pro',
'https://matic-mainnet-full-rpc.bwarelabs.com',
],
blockExplorers: [
{
name: 'Polygonscan',
url: 'https://polygonscan.com',
},
],
},
polygonTestnetMumbai: {
id: 80001,
name: 'Polygon Testnet Mumbai',
nativeCurrency: {
name: 'Matic',
symbol: 'MATIC',
decimals: 18,
},
rpcUrls: [
'https://matic-mumbai.chainstacklabs.com',
'https://rpc-mumbai.maticvigil.com',
'https://matic-testnet-archive-rpc.bwarelabs.com',
],
blockExplorers: [
{
name: 'Polygonscan',
url: 'https://mumbai.polygonscan.com',
},
],
testnet: true,
},
arbitrumOne: {
id: 42161,
name: 'Arbitrum One',
nativeCurrency: { name: 'Ether', symbol: 'AETH', decimals: 18 },
rpcUrls: ['https://arb1.arbitrum.io/rpc'],
blockExplorers: [
{ name: 'Arbiscan', url: 'https://arbiscan.io' },
{
name: 'Arbitrum Explorer',
url: 'https://explorer.arbitrum.io',
},
],
},
|
id: 421611,
name: 'Rinkeby Arbitrum',
nativeCurrency: {
name: 'Rinkeby ArbEther',
symbol: 'rinkArbETH',
decimals: 18,
},
rpcUrls: ['https://rinkeby.arbitrum.io/rpc'],
blockExplorers: [
{
name: 'Arbitrum Explorer',
url: 'https://rinkeby-explorer.arbitrum.io',
},
],
testnet: true,
},
localhost: {
id: 1337,
name: 'Localhost',
rpcUrls: ['https://127.0.0.1:8545'],
},
hardhat: {
id: 31337,
name: 'hardhat',
rpcUrls: ['http://127.0.0.1:8545'],
},
}
export const defaultChains: Chain[] = [
chain.mainnet,
chain.ropsten,
chain.rinkeby,
chain.goerli,
chain.kovan,
]
export const defaultL2Chains: Chain[] = [
chain.optimisticEthereum,
chain.optimisticKovan,
chain.polygonMainnet,
chain.polygonTestnetMumbai,
chain.arbitrumOne,
chain.arbitrumRinkeby,
]
export const developmentChains: Chain[] = [chain.localhost, chain.hardhat]
|
arbitrumRinkeby: {
|
about_control_statements.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from runner.koan import *
class AboutControlStatements(Koan):
def test_if_then_else_statements(self):
if True:
result = 'true value'
else:
result = 'false value'
self.assertEqual('true value', result)
def test_if_then_statements(self):
result = 'default value'
if True:
result = 'true value'
self.assertEqual('true value', result)
def test_if_then_elif_else_statements(self):
if False:
result = 'first value'
elif True:
result = 'true value'
else:
result = 'default value'
self.assertEqual('true value', result)
def test_while_statement(self):
|
i += 1
self.assertEqual(3628800, result)
def test_break_statement(self):
i = 1
result = 1
while True:
if i > 10: break
result = result * i
i += 1
self.assertEqual(3628800, result)
def test_continue_statement(self):
i = 0
result = []
while i < 10:
i += 1
if (i % 2) == 0: continue
result.append(i)
self.assertEqual([1, 3, 5, 7, 9], result)
def test_for_statement(self):
phrase = ["fish", "and", "chips"]
result = []
for item in phrase:
result.append(item.upper())
self.assertEqual(['FISH', 'AND', 'CHIPS'], result)
def test_for_statement_with_tuples(self):
round_table = [
("Lancelot", "Blue"),
("Galahad", "I don't know!"),
("Robin", "Blue! I mean Green!"),
("Arthur", "Is that an African Swallow or European Swallow?")
]
result = []
for knight, answer in round_table:
result.append("Contestant: '" + knight + "' Answer: '" + answer + "'")
text = ["Contestant: 'Lancelot' Answer: 'Blue'", "Contestant: 'Galahad' Answer: 'I don't know!'", "Contestant: 'Robin' Answer: 'Blue! I mean Green!'", "Contestant: 'Arthur' Answer: 'Is that an African Swallow or European Swallow?'"]
self.assertRegex(result[2], text)
self.assertNotRegex(result[0], text)
self.assertNotRegex(result[1], text)
self.assertNotRegex(result[3], text)
|
i = 1
result = 1
while i <= 10:
result = result * i
|
options.go
|
/*
* Tencent is pleased to support the open source community by making Blueking Container Service available.,
* Copyright (C) 2019 THL A29 Limited, a Tencent company. All rights reserved.
* Licensed under the MIT License (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
* http://opensource.org/licenses/MIT
* Unless required by applicable law or agreed to in writing, software distributed under,
* the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language governing permissions and
* limitations under the License.
*/
package options
import (
"bk-bcs/bcs-common/common/blog"
"bk-bcs/bcs-common/common/conf"
)
const (
// ServiceRegistryKubernetes service discovery for k8s
ServiceRegistryKubernetes = "kubernetes"
// ServiceRegistryMesos service discovery for mesos
ServiceRegistryMesos = "mesos"
)
// NetworkPolicyOption the option of bcs network policy controller
type NetworkPolicyOption struct {
conf.ServiceConfig
conf.ServerOnlyCertConfig
conf.FileConfig
conf.MetricConfig
conf.LogConfig
conf.ProcessConfig
ServiceRegistry string `json:"serviceRegistry" value:"kubernetes" usage:"registry for service discovery; [kubernetes, mesos]"`
KubeMaster string `json:"kubeMaster" value:"" usage:"kube-apiserver url"`
Kubeconfig string `json:"kubeconfig" value:"" usage:"kubeconfig for kube-apiserver, Only required if out-of-cluster."`
KubeResyncPeriod uint `json:"kubeResyncPeried" value:"300" usage:"resync interval for informer factory in seconds; (default 300)"`
KubeCacheSyncTimeout uint `json:"kubeCacheSyncTimeout" value:"10" usage:"wait for kube cache sync timeout in seconds; (default 10)"`
IPTableSyncPeriond uint `json:"iptablesSyncPeriod" value:"300" usage:"interval for sync iptables rules in seconds; (default 300)"`
NetworkInterface string `json:"iface" value:"eth1" usage:"network interface to get ip"`
Debug bool `json:"debug" value:"false" usage:"open pprof"`
}
// New new NetworkPolicyOption object
func New() *NetworkPolicyOption
|
// Parse parse options
func Parse(opt *NetworkPolicyOption) {
conf.Parse(opt)
// validation config
if opt.ServiceRegistry != ServiceRegistryKubernetes && opt.ServiceRegistry != ServiceRegistryMesos {
blog.Fatal("registry for service discovery, available values [kubernetes, mesos]")
}
if len(opt.Kubeconfig) == 0 {
blog.Fatal("kubeconfig cannot be empty")
}
}
|
{
return &NetworkPolicyOption{}
}
|
send_sync.rs
|
use std::thread;
use winrt::foundation::Uri;
// Simple test to validate that types with MarshalingType.Agile are marked Send and Sync
// (if this compiles it worked)
#[test]
fn send_sync() -> winrt::Result<()>
|
{
let url = Uri::create_uri("http://kennykerr.ca")?;
thread::spawn(move || {
assert_eq!("http://kennykerr.ca/", url.to_string().unwrap());
});
Ok(())
}
|
|
processes_scraper_linux.go
|
// Copyright The OpenTelemetry Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//go:build linux || openbsd
// +build linux openbsd
package processesscraper
import (
"github.com/shirou/gopsutil/load"
"go.opentelemetry.io/collector/model/pdata"
"go.opentelemetry.io/collector/receiver/hostmetricsreceiver/internal/metadata"
)
const unixSystemSpecificMetricsLen = 1
func appendUnixSystemSpecificProcessesMetrics(metrics pdata.MetricSlice, startTime pdata.Timestamp, now pdata.Timestamp, misc *load.MiscStat) error {
initializeProcessesCreatedMetric(metrics.AppendEmpty(), startTime, now, misc)
return nil
}
func
|
(metric pdata.Metric, startTime, now pdata.Timestamp, misc *load.MiscStat) {
metadata.Metrics.SystemProcessesCreated.Init(metric)
ddp := metric.Sum().DataPoints().AppendEmpty()
ddp.SetStartTimestamp(startTime)
ddp.SetTimestamp(now)
ddp.SetIntVal(int64(misc.ProcsCreated))
}
|
initializeProcessesCreatedMetric
|
gatekeeper_app.py
|
#!/usr/bin/env python
import sys
import json
import time
import logging
import traceback
from core import Messages, EncryptedConnection, Gatekeeper
from threads import Heartbeat, EventWatcher
class GatekeeperApp(object):
def run(self, config):
|
if __name__ == "__main__":
# Pull the config
with open('gw_config.json', 'r') as f:
config = json.load(f)
# Pull the command line args
config['initialSync'] = "--sync" in sys.argv
config['syncClocks'] = "--set-time" in sys.argv
config['clearCodes'] = "--clear-all" in sys.argv
if "--debug" in sys.argv:
config['DEBUG'] = True
# Configure logging
log_level = logging.DEBUG if config.get('DEBUG', False) else logging.INFO
logging.basicConfig(format='%(asctime)s [%(levelname)s]: %(message)s', level=log_level)
logging.getLogger("requests").setLevel(logging.WARNING)
# Start the application
app = GatekeeperApp()
app.run(config)
# Copyright 2019 Office Nomads LLC (http://www.officenomads.com/) Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
|
try:
logging.info("Starting up Gatekeeper...")
gatekeeper = Gatekeeper(config)
connection = gatekeeper.get_connection()
# Sync our system clocks
gatekeeper.set_system_clock()
# Test the connection encryption
if gatekeeper.test_keymaster_connection():
logging.info("Keymaster encrypted connection successfull!")
# Pull the configuration
gatekeeper.configure_doors()
if len(gatekeeper.doors) == 0:
logging.error("No doors to program. Exiting")
return
logging.info("Configured %d doors" % len(gatekeeper.doors))
# Set the time on each door
if config['syncClocks']:
gatekeeper.sync_clocks()
# Clear out all the door codes if requested
if config['clearCodes']:
gatekeeper.clear_all_codes()
initialSync = True
# Pull new data if requested
if config['initialSync']:
gatekeeper.pull_door_codes()
try:
# Start with a clean bowl
sys.stdout.flush()
heartbeat = None
event_watcher = None
hb_conn_err = False
while True:
# Keep our heartbeat alive
if not heartbeat or not heartbeat.is_alive():
hb_conn_err = False
if heartbeat and heartbeat.error:
try:
# Heartbeat errors can come from a poor connection to the Keymaster
# In cases like these we need to keep retrying to send the log up
gatekeeper.send_gatekeper_log("Heartbeat: " + str(heartbeat.error))
except Exception as e:
hb_conn_err = True
logging.warning("Unable to report hearbeat error!: %s" % str(e))
time.sleep(5)
if not hb_conn_err:
logging.info("Starting Heartbeat...")
poll_delay = config.get('KEYMASTER_POLL_DELAY_SEC', 5)
heartbeat = Heartbeat(connection, poll_delay)
heartbeat.setDaemon(True)
heartbeat.start()
# Keep our event watcher alive
if not event_watcher or not event_watcher.is_alive():
if event_watcher and event_watcher.error:
gatekeeper.send_gatekeper_log("EventWatcher: " + str(event_watcher.error))
time.sleep(5)
logging.info("Starting Event Watcher...")
poll_delay = config.get('EVENT_POLL_DELAY_SEC', 10)
event_watcher = EventWatcher(gatekeeper, poll_delay)
event_watcher.setDaemon(True)
event_watcher.start()
if heartbeat.new_data:
gatekeeper.pull_door_codes()
heartbeat.all_clear()
if event_watcher.new_data:
event_logs = gatekeeper.pull_event_logs()
gatekeeper.push_event_logs(event_logs)
event_watcher.all_clear()
time.sleep(.1)
except KeyboardInterrupt:
logging.info(" Keyboard Interupt!")
logging.info("Shutting down Heartbeat...")
if heartbeat and heartbeat.is_alive():
heartbeat.stop()
#heartbeat.join()
logging.info("Shutting down Event Watcher...")
if event_watcher and event_watcher.is_alive():
event_watcher.stop()
#event_watcher.join()
except Exception as e:
traceback.print_exc()
logging.error("Error: %s" % str(e))
|
IMarketGetResponse.ts
|
import { IResponse } from "./IResponse";
export interface IMarketGetResponse extends IResponse {
/**
* Market data by date.
*/
data?: {
/**
* The market date.
*/
d: string;
/**
* The market price.
*/
p: number;
/**
* The market cap.
*/
m: number;
/**
* 24h Market volume.
*/
v: number;
}[];
/**
* Market data every 5 minutes for the last day.
*/
day?: {
/**
* The timestamp.
*/
t: number;
/**
* The market price.
*/
p: number;
/**
|
* The market cap.
*/
m: number;
/**
* Market volume.
*/
v: number;
}[];
}
| |
DesignPreviewItem.js
|
import React, { PureComponent, Component } from 'react';
import { findDOMNode } from 'react-dom';
|
import scroll from '../utils/scroll';
import offset from '../utils/offset';
function scrollNodeToTop(node, offsets) {
const pos = offset(node);
const top = isFunction(offsets.top)
? offsets.top(pos.top)
: pos.top + offsets.top;
const left = isFunction(offsets.left)
? offsets.left(pos.left)
: pos.left + offsets.left;
scroll(document.body, left, top);
}
export default class DesignPreviewItem extends (PureComponent || Component) {
static propTypes = {
children: PropTypes.node.isRequired,
prefix: PropTypes.string
};
static defaultProps = {
prefix: 'zent'
};
render() {
const { children, prefix } = this.props;
return <div className={`${prefix}-design-preview-item`}>{children}</div>;
}
scrollTop(offsets) {
const node = findDOMNode(this);
scrollNodeToTop(node, offsets);
}
}
|
import PropTypes from 'prop-types';
import isFunction from 'lodash/isFunction';
|
QueryBaseConverter.ts
|
/**
* Defines the base-class for the QueryConverter class, which adds a helper to simplify param-rendering.
*/
export class
|
{
protected addParamIfSet(params: string[], key: string, param: any) {
let value = param.toString();
if (value) {
params.push(`${key}=${encodeURIComponent(value)}`);
}
}
}
|
QueryBaseConverter
|
data.go
|
package data
import (
"spaco_go/internal/conf"
"time"
"github.com/go-kratos/kratos/v2/log"
"github.com/google/wire"
"gorm.io/driver/mysql"
"gorm.io/gorm"
_ "github.com/go-sql-driver/mysql"
)
// ProviderSet is data providers.
var ProviderSet = wire.NewSet(NewData, NewGreeterRepo)
// Data .
type Data struct {
// TODO warpped database client
// client ent.Client
db *gorm.DB
}
// NewData .
func
|
(c *conf.Data, logger log.Logger) (*Data, func(), error) {
log := log.NewHelper("data", logger)
dsn := "root:123!@#qwe@tcp(127.0.0.1:3306)/kratos_demo?charset=utf8mb4&parseTime=True&loc=Local"
client, err := gorm.Open(mysql.Open(dsn), &gorm.Config{})
if err != nil {
log.Errorf("failed opening connection to mysql: %v", err)
return nil, nil, err
}
client.AutoMigrate(&GreeterEntity{})
// // Run the auto migration tool.
// if err := client.Schema.Create(context.Background()); err != nil {
// log.Errorf("failed creating schema resources: %v", err)
// return nil, nil, err
// }
sqlDB, err := client.DB()
// SetMaxIdleConns 用于设置连接池中空闲连接的最大数量。
sqlDB.SetMaxIdleConns(10)
// SetMaxOpenConns 设置打开数据库连接的最大数量。
sqlDB.SetMaxOpenConns(100)
// SetConnMaxLifetime 设置了连接可复用的最大时间。
sqlDB.SetConnMaxLifetime(time.Hour)
d := &Data{
db: client,
}
return d, func() {
logger.Log("message", "closing the data resources")
sqlDB, err := d.db.DB()
if err != nil {
logger.Log("message", "DB也报错了", err)
return
}
sqlDB.Close()
}, nil
}
|
NewData
|
synapse_serviceaccount.go
|
/*
Copyright 2021.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
|
package synapse
import (
corev1 "k8s.io/api/core/v1"
rbacv1 "k8s.io/api/rbac/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
ctrl "sigs.k8s.io/controller-runtime"
"sigs.k8s.io/controller-runtime/pkg/client"
synapsev1alpha1 "github.com/opdev/synapse-operator/apis/synapse/v1alpha1"
)
// serviceAccountForSynapse returns a synapse ServiceAccount object
func (r *SynapseReconciler) serviceAccountForSynapse(s *synapsev1alpha1.Synapse, objectMeta metav1.ObjectMeta) (client.Object, error) {
sa := &corev1.ServiceAccount{
ObjectMeta: objectMeta,
}
// Set Synapse instance as the owner and controller
if err := ctrl.SetControllerReference(s, sa, r.Scheme); err != nil {
return &corev1.ServiceAccount{}, err
}
return sa, nil
}
// roleBindingForSynapse returns a synapse RoleBinding object
func (r *SynapseReconciler) roleBindingForSynapse(s *synapsev1alpha1.Synapse, objectMeta metav1.ObjectMeta) (client.Object, error) {
rb := &rbacv1.RoleBinding{
ObjectMeta: objectMeta,
RoleRef: rbacv1.RoleRef{
APIGroup: "rbac.authorization.k8s.io",
Kind: "ClusterRole",
Name: "system:openshift:scc:anyuid",
},
Subjects: []rbacv1.Subject{{
Kind: "ServiceAccount",
Name: objectMeta.Name,
Namespace: objectMeta.Namespace,
}},
}
// Set Synapse instance as the owner and controller
if err := ctrl.SetControllerReference(s, rb, r.Scheme); err != nil {
return &rbacv1.RoleBinding{}, err
}
return rb, nil
}
|
limitations under the License.
*/
|
imports_circular_b.py
|
# This is a generated file! Please edit source .ksy file and use kaitai-struct-compiler to rebuild
from pkg_resources import parse_version
import kaitaistruct
from kaitaistruct import KaitaiStruct, KaitaiStream, BytesIO
if parse_version(kaitaistruct.__version__) < parse_version('0.9'):
raise Exception("Incompatible Kaitai Struct Python API: 0.9 or later is required, but you have %s" % (kaitaistruct.__version__))
import imports_circular_a
class ImportsCircularB(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
|
def _read(self):
self.initial = self._io.read_u1()
if self.initial == 65:
self.back_ref = imports_circular_a.ImportsCircularA(self._io)
|
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
|
deviceEnrollmentConfigurationsRequestBuilderGetRequestConfiguration.ts
|
import {DeviceEnrollmentConfigurationsRequestBuilderGetQueryParameters} from './deviceEnrollmentConfigurationsRequestBuilderGetQueryParameters';
import {RequestOption} from '@microsoft/kiota-abstractions';
/** Configuration for the request such as headers, query parameters, and middleware options. */
export class
|
{
/** Request headers */
public headers?: Record<string, string> | undefined;
/** Request options */
public options?: RequestOption[] | undefined;
/** Request query parameters */
public queryParameters?: DeviceEnrollmentConfigurationsRequestBuilderGetQueryParameters | undefined;
}
|
DeviceEnrollmentConfigurationsRequestBuilderGetRequestConfiguration
|
AsyncValue.js
|
var queueMicrotask = require("../../../runtime/queueMicrotask");
function AsyncValue() {
/**
* The data that was provided via call to resolve(data).
* This property is assumed to be public and available for inspection.
*/
this.w_ = undefined;
/**
* The data that was provided via call to reject(err)
* This property is assumed to be public and available for inspection.
*/
this.x_ = undefined;
/**
|
/**
* The state of the data holder (STATE_INITIAL, STATE_RESOLVED, or STATE_REJECTED)
*/
this.z_ = false;
}
function notifyCallbacks(asyncValue, err, value) {
var callbacks = asyncValue.y_;
if (callbacks) {
// clear out the registered callbacks (we still have reference to the original value)
asyncValue.y_ = undefined;
// invoke all of the callbacks and use their scope
for (var i = 0; i < callbacks.length; i++) {
// each callback is actually an object with "scope and "callback" properties
var callback = callbacks[i];
callback(err, value);
}
}
}
AsyncValue.prototype = {
/**
* Adds a callback to the queue. If there is not a pending request to load data
* and we have a "loader" then we will use that loader to request the data.
* The given callback will be invoked when there is an error or resolved data
* available.
*/
A_: function (callback) {
// Do we already have data or error?
if (this.z_) {
// invoke the callback immediately
return callback(this.x_, this.w_);
}
var callbacks = this.y_ || (this.y_ = []);
callbacks.push(callback);
},
/**
* This method will trigger any callbacks to be notified of rejection (error).
* If this data holder has a loader then the data holder will be returned to
* its initial state so that any future requests to load data will trigger a
* new load call.
*/
B_: function (err) {
if (this.z_) {
return;
}
// remember the error
this.x_ = err;
// Go to the rejected state if we don't have a loader.
// If we do have a loader then return to the initial state
// (we do this so that next call to done() will trigger load
// again in case the error was transient).
this.z_ = true;
// always notify callbacks regardless of whether or not we return to the initial state
notifyCallbacks(this, err, null);
},
/**
* This method will trigger any callbacks to be notified of data.
*/
C_: function (value) {
if (this.z_) {
return;
}
if (value && typeof value.then === "function") {
var asyncValue = this;
var finalPromise = value.then(function onFulfilled(value) {
queueMicrotask(asyncValue.C_.bind(asyncValue, value));
}, function onRejected(err) {
queueMicrotask(asyncValue.B_.bind(asyncValue, err));
});
if (finalPromise.done) {
finalPromise.done();
}
} else {
// remember the state
this.w_ = value;
// go to the resolved state
this.z_ = true;
// notify callbacks
notifyCallbacks(this, null, value);
}
}
};
module.exports = AsyncValue;
|
* The queue of callbacks that are waiting for data
*/
this.y_ = undefined;
|
operator.rs
|
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::fmt;
/// Unary operators
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum UnaryOperator {
Plus,
Minus,
Not,
}
impl fmt::Display for UnaryOperator {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str(match self {
UnaryOperator::Plus => "+",
UnaryOperator::Minus => "-",
UnaryOperator::Not => "NOT",
})
}
}
/// Binary operators
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum
|
{
Plus,
Minus,
Multiply,
Divide,
Modulus,
Gt,
Lt,
GtEq,
LtEq,
Eq,
NotEq,
And,
Or,
Like,
NotLike,
JsonGet,
JsonGetAsText,
JsonGetPath,
JsonGetPathAsText,
JsonContainsJson,
JsonContainedInJson,
JsonContainsField,
JsonContainsAnyFields,
JsonContainsAllFields,
JsonConcat,
JsonDeletePath,
JsonContainsPath,
JsonApplyPathPredicate,
}
impl fmt::Display for BinaryOperator {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str(match self {
BinaryOperator::Plus => "+",
BinaryOperator::Minus => "-",
BinaryOperator::Multiply => "*",
BinaryOperator::Divide => "/",
BinaryOperator::Modulus => "%",
BinaryOperator::Gt => ">",
BinaryOperator::Lt => "<",
BinaryOperator::GtEq => ">=",
BinaryOperator::LtEq => "<=",
BinaryOperator::Eq => "=",
BinaryOperator::NotEq => "<>",
BinaryOperator::And => "AND",
BinaryOperator::Or => "OR",
BinaryOperator::Like => "LIKE",
BinaryOperator::NotLike => "NOT LIKE",
BinaryOperator::JsonGet => "->",
BinaryOperator::JsonGetAsText => "->>",
BinaryOperator::JsonGetPath => "#>",
BinaryOperator::JsonGetPathAsText => "#>>",
BinaryOperator::JsonContainsJson => "@>",
BinaryOperator::JsonContainedInJson => "<@",
BinaryOperator::JsonContainsField => "?",
BinaryOperator::JsonContainsAnyFields => "?|",
BinaryOperator::JsonContainsAllFields => "?&",
BinaryOperator::JsonConcat => "||",
BinaryOperator::JsonDeletePath => "#-",
BinaryOperator::JsonContainsPath => "@?",
BinaryOperator::JsonApplyPathPredicate => "@@",
})
}
}
|
BinaryOperator
|
config.rs
|
use graph::prelude::{
anyhow::{anyhow, Result},
info, serde_json, Logger,
};
use regex::Regex;
use serde::{Deserialize, Serialize};
use std::collections::BTreeMap;
use std::fs::read_to_string;
use url::Url;
const PRIMARY: &str = "primary";
const ANY_NAME: &str = ".*";
use crate::opt::Opt;
#[derive(Debug, Deserialize, Serialize)]
pub struct Config {
#[serde(rename = "store")]
stores: BTreeMap<String, Shard>,
deployment: Deployment,
ingestor: Ingestor,
}
fn validate_name(s: &str) -> Result<()> {
for c in s.chars() {
if !c.is_ascii_alphanumeric() || c == '-' {
return Err(anyhow!(
"names can only contain alphanumeric characters or '-', but `{}` contains `{}`",
s,
c
));
}
}
Ok(())
}
impl Config {
/// Check that the config is valid. Some defaults (like `pool_size`) will
/// be filled in from `opt` at the same time.
fn validate(&mut self, opt: &Opt) -> Result<()> {
if !self.stores.contains_key(PRIMARY) {
return Err(anyhow!("missing a primary store"));
}
for (key, shard) in self.stores.iter_mut() {
validate_name(key)?;
shard.validate(opt)?;
}
self.deployment.validate()?;
Ok(())
}
/// Load a configuration file if `opt.config` is set. If not, generate
/// a config from the command line arguments in `opt`
pub fn load(logger: &Logger, opt: &Opt) -> Result<Config> {
if let Some(config) = &opt.config {
info!(logger, "Reading configuration file `{}`", config);
let config = read_to_string(config)?;
let mut config: Config = toml::from_str(&config)?;
config.validate(opt)?;
Ok(config)
} else {
info!(
logger,
"Generating configuration from command line arguments"
);
Self::from_opt(opt)
}
}
fn from_opt(opt: &Opt) -> Result<Config> {
let ingestor = Ingestor::from_opt(opt);
let deployment = Deployment::from_opt(opt);
let mut stores = BTreeMap::new();
stores.insert(PRIMARY.to_string(), Shard::from_opt(opt)?);
Ok(Config {
stores,
deployment,
ingestor,
})
}
/// Genrate a JSON representation of the config.
pub fn to_json(&self) -> Result<String>
|
pub fn primary_store(&self) -> &Shard {
self.stores
.get(PRIMARY)
.expect("a validated config has a primary store")
}
}
#[derive(Debug, Deserialize, Serialize)]
pub struct Shard {
pub connection: String,
#[serde(default = "one")]
pub weight: usize,
#[serde(default)]
pub pool_size: u32,
pub replicas: BTreeMap<String, Replica>,
}
fn check_pool_size(pool_size: u32, connection: &str) -> Result<()> {
if pool_size < 2 {
Err(anyhow!(
"connection pool size must be at least 2, but is {} for {}",
pool_size,
connection
))
} else {
Ok(())
}
}
impl Shard {
fn validate(&mut self, opt: &Opt) -> Result<()> {
self.connection = shellexpand::env(&self.connection)?.into_owned();
if self.pool_size == 0 {
self.pool_size = opt.store_connection_pool_size;
}
check_pool_size(self.pool_size, &self.connection)?;
for (name, replica) in self.replicas.iter_mut() {
validate_name(name)?;
replica.validate(opt)?;
}
Ok(())
}
fn from_opt(opt: &Opt) -> Result<Self> {
let postgres_url = opt
.postgres_url
.as_ref()
.expect("validation checked that postgres_url is set");
check_pool_size(opt.store_connection_pool_size, &postgres_url)?;
let mut replicas = BTreeMap::new();
for (i, host) in opt.postgres_secondary_hosts.iter().enumerate() {
let replica = Replica {
connection: replace_host(&postgres_url, &host),
weight: opt.postgres_host_weights.get(i + 1).cloned().unwrap_or(1),
pool_size: opt.store_connection_pool_size,
};
replicas.insert(format!("replica{}", i + 1), replica);
}
Ok(Self {
connection: postgres_url.clone(),
weight: opt.postgres_host_weights.get(0).cloned().unwrap_or(1),
pool_size: opt.store_connection_pool_size,
replicas,
})
}
}
#[derive(Debug, Deserialize, Serialize)]
pub struct Replica {
pub connection: String,
#[serde(default = "one")]
pub weight: usize,
#[serde(default = "zero")]
pub pool_size: u32,
}
impl Replica {
fn validate(&mut self, opt: &Opt) -> Result<()> {
self.connection = shellexpand::env(&self.connection)?.into_owned();
if self.pool_size == 0 {
self.pool_size = opt.store_connection_pool_size;
}
check_pool_size(self.pool_size, &self.connection)?;
Ok(())
}
}
#[derive(Debug, Deserialize, Serialize)]
struct Deployment {
#[serde(rename = "rule")]
rules: Vec<Rule>,
}
impl Deployment {
fn validate(&self) -> Result<()> {
if self.rules.is_empty() {
return Err(anyhow!(
"there must be at least one deployment rule".to_string()
));
}
let mut default_rule = false;
for rule in &self.rules {
rule.validate()?;
if default_rule {
return Err(anyhow!("rules after a default rule are useless"));
}
default_rule = rule.is_default();
}
if !default_rule {
return Err(anyhow!(
"the rules do not contain a default rule that matches everything"
));
}
Ok(())
}
// This needs to be moved to some sort of trait
#[allow(dead_code)]
fn place(&self, name: &str, network: &str, default: &str) -> Option<(&str, Vec<String>)> {
if self.rules.is_empty() {
// This can only happen if we have only command line arguments and no
// configuration file
Some((PRIMARY, vec![default.to_string()]))
} else {
self.rules
.iter()
.find(|rule| rule.matches(name, network))
.map(|rule| (rule.store.as_str(), rule.indexers.clone()))
}
}
fn from_opt(_: &Opt) -> Self {
Self { rules: vec![] }
}
}
#[derive(Debug, Deserialize, Serialize)]
struct Rule {
#[serde(rename = "match", default)]
pred: Predicate,
#[serde(default = "primary_store")]
store: String,
indexers: Vec<String>,
}
impl Rule {
fn is_default(&self) -> bool {
self.pred.matches_anything()
}
fn matches(&self, name: &str, network: &str) -> bool {
self.pred.matches(name, network)
}
fn validate(&self) -> Result<()> {
if self.indexers.is_empty() {
return Err(anyhow!("useless rule without indexers"));
}
Ok(())
}
}
#[derive(Debug, Deserialize, Serialize)]
struct Predicate {
#[serde(with = "serde_regex", default = "any_name")]
name: Regex,
network: Option<String>,
}
impl Predicate {
fn matches_anything(&self) -> bool {
self.name.as_str() == ANY_NAME && self.network.is_none()
}
pub fn matches(&self, name: &str, network: &str) -> bool {
if let Some(n) = &self.network {
if n != network {
return false;
}
}
match self.name.find(name) {
None => false,
Some(m) => m.as_str() == name,
}
}
}
impl Default for Predicate {
fn default() -> Self {
Predicate {
name: any_name(),
network: None,
}
}
}
#[derive(Debug, Deserialize, Serialize)]
struct Ingestor {
node: String,
}
impl Ingestor {
fn from_opt(opt: &Opt) -> Self {
// If we are not the block ingestor, set the node name
// to something that is definitely not our node_id
if opt.disable_block_ingestor {
Ingestor {
node: format!("{} is not ingesting", opt.node_id),
}
} else {
Ingestor {
node: opt.node_id.clone(),
}
}
}
}
/// Replace the host portion of `url` and return a new URL with `host`
/// as the host portion
///
/// Panics if `url` is not a valid URL (which won't happen in our case since
/// we would have paniced before getting here as `url` is the connection for
/// the primary Postgres instance)
fn replace_host(url: &str, host: &str) -> String {
let mut url = match Url::parse(url) {
Ok(url) => url,
Err(_) => panic!("Invalid Postgres URL {}", url),
};
if let Err(e) = url.set_host(Some(host)) {
panic!("Invalid Postgres url {}: {}", url, e.to_string());
}
url.into_string()
}
// Various default functions for deserialization
fn any_name() -> Regex {
Regex::new(ANY_NAME).unwrap()
}
fn primary_store() -> String {
PRIMARY.to_string()
}
fn one() -> usize {
1
}
fn zero() -> u32 {
0
}
|
{
// It would be nice to produce a TOML representation, but that runs
// into this error: https://github.com/alexcrichton/toml-rs/issues/142
// and fixing it as described in the issue didn't fix it. Since serializing
// this data isn't crucial and only needed for debugging, we'll
// just stick with JSON
Ok(serde_json::to_string_pretty(&self)?)
}
|
program.go
|
//+build !disable_gl
/*
Copyright 2020 The goARRG Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package main
import (
"os"
"time"
"goarrg.com/debug"
)
type program struct {
timer *time.Timer
}
func (p *program) Init() error {
p.timer = time.NewTimer(time.Millisecond * 500)
return nil
}
func (p *program) Update(deltaTime float64) {
|
if err != nil {
debug.EPrint(err)
os.Exit(1)
}
p.timer.Reset(time.Millisecond * 500)
default:
}
}
func (p *program) Shutdown() bool {
return true
}
func (p *program) Destroy() {
}
|
select {
case <-p.timer.C:
err := PlaySound("test2.wav")
|
server.js
|
// When false nothing will be logged.
// Configuration goes here.
require('./config.js');
if (!configuration.DEBUG) {
console.log = function() {};
}
const Discord = require("discord.js");
const fs = require('fs');
let net;
let netOptions = {};
if (configuration.tlsEnabled) {
net = require('tls');
netOptions = {
key: fs.readFileSync(configuration.tlsOptions.keyPath),
cert: fs.readFileSync(configuration.tlsOptions.certPath)
}
} else {
net = require('net');
}
let request;
if (configuration.handleCode) {
request = require('request');
}
//
// Let's ready some variables and stuff we will use later on.
//
// Object which will contain channel information.
let ircDetails = {
DMserver: {
lastPRIVMSG: []
}
};
// Since we want a seperate connection for each discord server we will need to store our sockets.
let ircClients = [];
// Simply used to give each new socket a unique number.
let ircClientCount = 0;
// This is used to make sure that if discord reconnects not everything is wiped.
let discordFirstConnection = true;
// Max line lenght for irc messages.
const maxLineLength = 510;
//
// Generic functions
//
// Function that parses irc messages.
// Shamelessly stolen from node-irc https://github.com/aredridel/node-ircd
function parseMessage(line) {
let message = {};
let m = /(:[^ ]+ )?([A-Z0-9]+)(?: (.*))?/i.exec(line);
if (!m) {
message['error'] = 'Unable to parse message';
} else {
let i;
if (m[3] && (i = m[3].indexOf(':')) !== -1) {
let rest = m[3].slice(i + 1);
message.params = m[3].slice(0, i - 1).split(' ');
message.params.push(rest);
} else {
if (m[3]) {
message.params = m[3].split(' ');
} else {
message.params = [];
}
}
if (m[2]) {
message.command = m[2].toUpperCase();
}
if (m[1]) {
message.sender = m[1];
}
}
return message;
}
// Returns a number based on the discord server that increases per call.
// Used to make fairly sure nicknames on irc end up being unique after being scrubbed.
// Make nicknames work for irc.
function ircNickname(discordDisplayName, botuser, discriminator) {
const replaceRegex = /[^a-zA-Z0-9_\\[\]\{\}\^`\|]/g;
const shortenRegex = /_{1,}/g;
if (replaceRegex.test(discordDisplayName)) {
let newDisplayname = `${discordDisplayName.replace(replaceRegex, '_')}${discriminator}`;
newDisplayname = newDisplayname.replace(shortenRegex, '_');
return botuser ? `${newDisplayname}[BOT]` : newDisplayname;
} else {
return botuser ? `${discordDisplayName}[BOT]` : discordDisplayName;
}
}
// Parses discord lines to make them work better on irc.
function parseDiscordLine(line, discordID) {
// Discord markdown parsing the lazy way. Probably fails in a bunch of different ways but at least it is easy.
line = line.replace(/\*\*(.*?)\*\*/g, '\x02$1\x0F');
line = line.replace(/\*(.*?)\*/g, '\x1D$1\x0F');
line = line.replace(/^_(.*?)\_/g, '\x01ACTION $1\x01');
line = line.replace(/__(.*?)__/g, '\x1F$1\x0F');
// With the above regex we might end up with to many end characters. This replaces the,
line = line.replace(/\x0F{2,}/g, '\x0F');
// Now let's replace mentions with names we can recognize.
const mentionUserRegex = /(<@!?\d{1,}?>)/g;
const mentionUserFound = line.match(mentionUserRegex);
if (mentionUserFound) {
mentionUserFound.forEach(function(mention) {
const userID = mention.replace(/<@!?(\d{1,}?)>/, '$1');
const memberObject = discordClient.guilds.get(discordID).members.get(userID);
const displayName = memberObject.displayName;
const isBot = memberObject.user.bot;
const discriminator = memberObject.user.discriminator;
const userName = ircNickname(displayName, isBot, discriminator);
const replaceRegex = new RegExp(mention, 'g');
if (userName) {
line = line.replace(replaceRegex, `@${userName}`);
}
});
}
// Now let's do this again and replace mentions with roles we can recognize.
const mentionRoleRegex = /(<@&\d{1,}?>)/g;
const mentionRoleFound = line.match(mentionRoleRegex);
if (mentionRoleFound) {
mentionRoleFound.forEach(function(mention) {
const roleID = mention.replace(/<@&(\d{1,}?)>/, '$1');
const roleObject = discordClient.guilds.get(discordID).roles.get(roleID);
const replaceRegex = new RegExp(mention, 'g');
if (roleObject) {
const name = roleObject.name;
line = line.replace(replaceRegex, `@${name}`);
}
});
}
// Channels are also a thing!.
const mentionChannelRegex = /(<#\d{1,}?>)/g;
const mentionChannelFound = line.match(mentionChannelRegex);
if (mentionChannelFound) {
mentionChannelFound.forEach(function(mention) {
const channelID = mention.replace(/<#(\d{1,}?)>/, '$1');
const channelObject = discordClient.guilds.get(discordID).channels.get(channelID);
const replaceRegex = new RegExp(mention, 'g');
if (channelObject) {
const name = channelObject.name;
line = line.replace(replaceRegex, `@${name}`);
}
});
}
return line;
}
// Parse irc lines to make them work better on discord.
function parseIRCLine(line, discordID, channel) {
line = line.replace(/\001ACTION(.*?)\001/g, '_$1_');
const mentionDiscordRegex = /(@.{1,}?\s)/g;
let mentionDiscordFound = line.match(mentionDiscordRegex);
const mentionIrcRegex = /(^.{1,}?:)/g;
let mentionIrcFound = line.match(mentionIrcRegex);
let mentionFound;
if (mentionDiscordFound && mentionIrcFound) {
mentionFound = mentionDiscordFound.concat(mentionIrcFound);
} else if (mentionDiscordFound) {
mentionFound = mentionDiscordFound;
} else {
mentionFound = mentionIrcFound;
}
if (mentionFound) {
mentionFound.forEach(function(mention) {
const regexDiscordMention = /@(.{1,}?)\s/;
const regexIrcMention = /^(.{1,}?):/;
let userNickname;
if (regexDiscordMention.mention) {
userNickname = mention.replace(regexDiscordMention, '$1');
} else {
userNickname = mention.replace(regexIrcMention, '$1');
}
if (ircDetails[discordID].channels[channel].members.hasOwnProperty(userNickname)) {
const userID = ircDetails[discordID].channels[channel].members[userNickname].id;
const replaceRegex = new RegExp(mention, 'g');
line = line.replace(replaceRegex, `<@!${userID}> `);
}
});
}
return line;
}
//
// Discord related functionality.
//
// Create our discord client.
let discordClient = new Discord.Client({
fetchAllMembers: true,
sync: true
});
// Log into discord using the token defined in config.js
discordClient.login(configuration.discordToken);
//
// Various events used for debugging.
//
// Will log discord debug information.
discordClient.on('debug', function(info) {
console.log('debug', info);
});
// When debugging we probably want to know about errors as well.
discordClient.on('error', function(info) {
console.log('error', info);
sendGeneralNotice('Discord error.');
});
// Emitted when the Client tries to reconnect after being disconnected.
discordClient.on('reconnecting', function() {
console.log('reconnecting');
sendGeneralNotice('Reconnecting to Discord.');
});
// Emitted whenever the client websocket is disconnected.
discordClient.on('disconnect', function(event) {
console.log('disconnected', event);
sendGeneralNotice('Discord has been disconnected.');
});
// Emitted for general warnings.
discordClient.on('warn', function(info) {
console.log('warn', info);
});
// Discord is ready.
discordClient.on('ready', function() {
// This is probably not needed, but since sometimes things are weird with discord.
discordClient.guilds.array().forEach(function(guild) {
guild.fetchMembers();
guild.sync();
});
console.log(`Logged in as ${discordClient.user.username}!`);
// Lets grab some basic information we will need eventually.
// But only do so if this is the first time connecting.
if (discordFirstConnection) {
discordFirstConnection = false;
discordClient.guilds.array().forEach(function(guild) {
const guildID = guild.id;
if (!ircDetails.hasOwnProperty(guildID)) {
ircDetails[guildID] = {
lastPRIVMSG: [],
channels: {},
members: {}
};
}
guild.members.array().forEach(function(member) {
const ircDisplayName = ircNickname(member.displayName, member.user.bot, member.user.discriminator);
ircDetails[guildID].members[ircDisplayName] = member.id;
});
});
discordClient.channels.array().forEach(function(channel) {
// Of course only for channels.
if (channel.type === 'text') {
const guildID = channel.guild.id,
channelName = channel.name,
channelID = channel.id,
channelTopic = channel.topic || 'No topic';
ircDetails[guildID].channels[channelName] = {
id: channelID,
joined: [],
topic: channelTopic
};
}
});
// Now that is done we can start the irc server side of things.
ircServer.listen(configuration.ircServer.listenPort);
} else {
sendGeneralNotice('Discord connection has been restored.');
}
});
//
// Acting on events
//
// There are multiple events that indicate a users is no longer on the server.
// We abuse the irc QUIT: message for this even if people are banned.
function guildMemberNoMore(guildID, ircDisplayName, noMoreReason) {
let found = false;
// First we go over the channels.
for (let channel in ircDetails[guildID].channels) {
if (ircDetails[guildID].channels.hasOwnProperty(channel) && ircDetails[guildID].channels[channel].joined.length > 0) {
let channelMembers = ircDetails[guildID].channels[channel].members;
// Within the channels we go over the members.
if (channelMembers.hasOwnProperty(ircDisplayName)) {
if (!found) {
let memberDetails = ircDetails[guildID].channels[channel].members[ircDisplayName];
console.log(`User ${ircDisplayName} quit ${noMoreReason}`);
ircDetails[guildID].channels[channel].joined.forEach(function(socketID) {
sendToIRC(guildID, `:${ircDisplayName}!${memberDetails.id}@whatever QUIT :${noMoreReason}\r\n`, socketID);
});
found = true;
}
delete ircDetails[guildID].channels[channel].members[ircDisplayName];
}
}
}
if (noMoreReason !== 'User gone offline') {
delete ircDetails[guildID].members[ircDisplayName];
}
}
function guildMemberCheckChannels(guildID, ircDisplayName, guildMember) {
// First we go over the channels.
for (let channel in ircDetails[guildID].channels) {
if (ircDetails[guildID].channels.hasOwnProperty(channel) && ircDetails[guildID].channels[channel].joined.length > 0) {
let isInDiscordChannel = false;
let isCurrentlyInIRC = false;
let channelDetails = ircDetails[guildID].channels[channel];
let channelMembers = channelDetails.members;
let channelID = channelDetails.id;
//Let's check the discord channel.
let discordMemberArray = discordClient.guilds.get(guildID).channels.get(channelID).members.array();
discordMemberArray.forEach(function(discordMember) {
if (guildMember.displayName === discordMember.displayName && (guildMember.presence.status !== 'offline' || configuration.showOfflineUsers)) {
isInDiscordChannel = true;
}
});
// Within the channels we go over the members.
if (channelMembers.hasOwnProperty(ircDisplayName)) {
// User found for channel.
isCurrentlyInIRC = true;
}
// If the user is in the discord channel but not irc we will add the user.
if (!isCurrentlyInIRC && isInDiscordChannel) {
ircDetails[guildID].channels[channel].members[ircDisplayName] = {
discordName: guildMember.displayName,
discordState: guildMember.presence.status,
ircNick: ircDisplayName,
id: guildMember.id
};
console.log(`User ${ircDisplayName} joined ${channel}`);
ircDetails[guildID].channels[channel].joined.forEach(function(socketID) {
sendToIRC(guildID, `:${ircDisplayName}!${guildMember.id}@whatever JOIN #${channel}\r\n`, socketID);
const socketDetails = getSocketDetails(socketID);
if (guildMember.presence === 'idle' && socketDetails.awayNotify) {
console.log(`User ${ircDisplayName} is away: Idle`);
sendToIRC(guildID, `:${ircDisplayName}!${guildMember.id}@whatever AWAY :Idle\r\n`, socketID);
}
if (guildMember.presence === 'dnd' && socketDetails.awayNotify) {
console.log(`User ${ircDisplayName} is away: Do not disturb`);
sendToIRC(guildID, `:${ircDisplayName}!${guildMember.id}@whatever AWAY :Do not disturb\r\n`, socketID);
}
// Unlikely to happen, but just to be sure.
if (guildMember.presence === 'offline' && configuration.showOfflineUsers && socketDetails.awayNotify) {
console.log(`User ${ircDisplayName} is offline`);
sendToIRC(guildID, `:${ircDisplayName}!${guildMember.id}@whatever AWAY :Offline\r\n`, socketID);
}
});
}
// If the user is currently in irc but not in the discord channel they have left the channel.
if (isCurrentlyInIRC && !isInDiscordChannel) {
ircDetails[guildID].channels[channel].joined.forEach(function(socketID) {
console.log(`User ${ircDisplayName} left ${channel}`);
sendToIRC(guildID, `:${ircDisplayName}!${guildMember.id}@whatever PART #${channel}\r\n`, socketID);
delete ircDetails[guildID].channels[channel].members[ircDisplayName];
});
}
}
}
}
function guildMemberNickChange(guildID, oldIrcDisplayName, newIrcDisplayName, newDiscordDisplayName) {
// First we go over the channels.
let foundInChannels = false;
let memberId;
ircDetails[guildID].members[newIrcDisplayName] = ircDetails[guildID].members[oldIrcDisplayName];
delete ircDetails[guildID].members[oldIrcDisplayName];
for (let channel in ircDetails[guildID].channels) {
if (ircDetails[guildID].channels.hasOwnProperty(channel) && ircDetails[guildID].channels[channel].joined.length > 0) {
let channelDetails = ircDetails[guildID].channels[channel];
let channelMembers = channelDetails.members;
// Within the channels we go over the members.
if (channelMembers.hasOwnProperty(oldIrcDisplayName)) {
let tempMember = channelMembers[oldIrcDisplayName];
tempMember.displayName = newDiscordDisplayName;
tempMember.ircNick = newIrcDisplayName;
memberId = tempMember.id;
delete ircDetails[guildID].channels[channel].members[oldIrcDisplayName];
ircDetails[guildID].channels[channel].members[oldIrcDisplayName] = tempMember;
foundInChannels = true;
}
}
}
if (foundInChannels) {
console.log(`Changing nickname ${oldIrcDisplayName} into ${newIrcDisplayName}`);
sendToIRC(guildID, `:${oldIrcDisplayName}!${memberId}@whatever NICK ${newIrcDisplayName}\r\n`);
}
}
discordClient.on('guildMemberRemove', function(GuildMember) {
if (ircClients.length > 0) {
console.log('guildMemberRemove');
const guildID = GuildMember.guild.id;
const isBot = GuildMember.user.bot;
const discriminator = GuildMember.user.discriminator;
const ircDisplayName = ircNickname(GuildMember.displayName, isBot, discriminator);
guildMemberNoMore(guildID, ircDisplayName, 'User removed');
}
});
discordClient.on('presenceUpdate', function(oldMember, newMember) {
if (ircClients.length > 0) {
const guildID = newMember.guild.id;
const isBot = newMember.user.bot;
const discriminator = newMember.user.discriminator;
const ircDisplayName = ircNickname(newMember.displayName, isBot, discriminator);
const oldPresenceState = oldMember.presence.status;
const newPresenceState = newMember.presence.status;
if (oldPresenceState === 'offline' && !configuration.showOfflineUsers) {
guildMemberCheckChannels(guildID, ircDisplayName, newMember);
} else if (newPresenceState === 'offline' && !configuration.showOfflineUsers) {
guildMemberNoMore(guildID, ircDisplayName, 'User gone offline');
} else if (configuration.showOfflineUsers) {
ircClients.forEach(function(socket) {
if (socket.awayNotify) {
// Technically we could just do socket.writeline for these. But for consistency we go through the sendToIRC function.
if (newPresenceState === 'offline' && configuration.showOfflineUsers) {
sendToIRC(guildID, `:${ircDisplayName}!${newMember.id}@whatever AWAY :Offline\r\n`, socket.ircid);
} else if (newPresenceState === 'dnd') {
sendToIRC(guildID, `:${ircDisplayName}!${newMember.id}@whatever AWAY :Do not disturb\r\n`, socket.ircid);
} else if (newPresenceState === 'idle') {
sendToIRC(guildID, `:${ircDisplayName}!${newMember.id}@whatever AWAY :Idle\r\n`, socket.ircid);
} else if (oldPresenceState !== 'offline' && newPresenceState === 'online') {
sendToIRC(guildID, `:${ircDisplayName}!${newMember.id}@whatever AWAY\r\n`, socket.ircid);
}
}
});
}
}
});
discordClient.on('guildMemberUpdate', function(oldMember, newMember) {
if (ircClients.length > 0) {
console.log('guildMemberUpdate');
const guildID = newMember.guild.id;
const oldIsBot = oldMember.user.bot;
const newIsBot = newMember.user.bot;
const discriminator = newMember.user.discriminator;
const oldIrcDisplayName = ircNickname(oldMember.displayName, oldIsBot, discriminator);
const newIrcDisplayName = ircNickname(newMember.displayName, newIsBot, discriminator);
const newDiscordDisplayName = newMember.displayName;
if (oldIrcDisplayName !== newIrcDisplayName) {
if (newMember.id === discordClient.user.id) {
sendToIRC(newMember.guild.id, `:${oldIrcDisplayName}!${discordClient.user.id}@whatever NICK ${newIrcDisplayName}\r\n`);
} else {
guildMemberNickChange(guildID, oldIrcDisplayName, newIrcDisplayName, newDiscordDisplayName);
}
} else {
guildMemberCheckChannels(guildID, newIrcDisplayName, newMember);
}
}
});
discordClient.on('guildMemberAdd', function(GuildMember) {
if (ircClients.length > 0) {
console.log('guildMemberAdd');
const guildID = GuildMember.guild.id;
const isBot = GuildMember.user.bot;
const discriminator = GuildMember.user.discriminator;
const ircDisplayName = ircNickname(GuildMember.displayName, isBot, discriminator);
guildMemberCheckChannels(guildID, ircDisplayName, GuildMember);
}
});
discordClient.on('channelCreate', function(newChannel) {
if (newChannel.type === 'text') {
const discordServerId = newChannel.guild.id;
ircDetails[discordServerId].channels[newChannel.name] = {
id: newChannel.id,
members: {},
topic: newChannel.topic || 'No topic',
joined: []
};
}
});
discordClient.on('channelDelete', function(deletedChannel) {
if (deletedChannel.type === 'text') {
const discordServerId = deletedChannel.guild.id;
if (ircDetails[discordServerId].channels[deletedChannel.name].joined.length > 0) {
const PartAlertMessage = `:discordIRCd!notReallyA@User PRIVMSG #${deletedChannel.name} :#${deletedChannel.name} has been deleted \r\n`;
sendToIRC(discordServerId, PartAlertMessage);
const joinedSockets = ircDetails[discordServerId].channels[deletedChannel.name].joined;
joinedSockets.forEach(function(socketID) {
// First we inform the user in the old channelContent
partCommand(deletedChannel.name, discordServerId, socketID);
});
}
// Finally remove the channel from the list.
delete ircDetails[discordServerId].channels[deletedChannel.name];
}
});
discordClient.on('channelUpdate', function(oldChannel, newChannel) {
const discordServerId = oldChannel.guild.id;
console.log('channel updated');
if (oldChannel.type === 'text') {
if (oldChannel.name !== newChannel.name) {
console.log(`channel name changed from #${oldChannel.name} to #${newChannel.name}`);
ircDetails[discordServerId].channels[newChannel.name] = {
id: newChannel.id,
members: {},
topic: newChannel.topic || 'No topic',
joined: []
};
if (ircDetails[discordServerId].channels[oldChannel.name].joined.length > 0) {
const PartAlertMessage = `:discordIRCd!notReallyA@User PRIVMSG #${oldChannel.name} :#${oldChannel.name} has been renamed to #${newChannel.name} \r\n`;
sendToIRC(discordServerId, PartAlertMessage);
const joinedSockets = ircDetails[discordServerId].channels[oldChannel.name].joined;
joinedSockets.forEach(function(socketID) {
// First we inform the user in the old channelContent
partCommand(oldChannel.name, discordServerId, socketID);
joinCommand(newChannel.name, discordServerId, socketID);
});
}
// Delete the old one.
delete ircDetails[discordServerId].channels[oldChannel.name];
}
}
// Simple topic change.
if (oldChannel.topic !== newChannel.topic) {
const topic = newChannel.topic || 'No topic';
ircClients.forEach(function(socket) {
if (socket.discordid === discordServerId && ircDetails[discordServerId].channels[newChannel.name].joined.indexOf(socket.ircid) > -1) {
const topicMSG = `:noboyknows!orCares@whatever TOPIC #${newChannel.name} :${topic}\r\n`;
sendToIRC(discordServerId, topicMSG, socket.ircid);
}
});
}
});
// Processing received messages
discordClient.on('message', function(msg) {
if (ircClients.length > 0 && msg.channel.type === 'text') {
const discordServerId = msg.guild.id;
// Webhooks don't have a member.
let authorDisplayName;
if (msg.member) {
authorDisplayName = msg.member.displayName;
} else {
authorDisplayName = msg.author.username;
}
const isBot = msg.author.bot;
const discriminator = msg.author.discriminator;
const authorIrcName = ircNickname(authorDisplayName, isBot, discriminator);
const channelName = msg.channel.name;
// Doesn't really matter socket we pick this from as long as it is connected to the discord server.
let ownNickname = getSocketDetails(ircDetails[discordServerId].channels[channelName].joined[0]).nickname;
let messageContent = msg.content;
if (configuration.handleCode) {
const codeRegex = /```(.*?)\r?\n([\s\S]*?)```/;
const replaceRegex = /```.*?\r?\n[\s\S]*?```/;
if (codeRegex.test(messageContent)) {
const codeDetails = messageContent.match(codeRegex);
// In the future I want to include the url in the message. But since the call to gist is async that doesn't fit the current structure.
messageContent = messageContent.replace(replaceRegex, '');
let extension;
let language;
if (codeDetails[1]) {
language = codeDetails[1].toLowerCase();
switch (language) {
case 'javascript':
extension = 'js';
break;
case 'html':
extension = 'html';
break;
case 'css':
extension = 'css';
break;
case 'xml':
extension = 'xml';
break;
case 'python':
extension = 'py';
break;
case 'c#':
extension = 'cs';
break;
case 'c++':
extension = 'cc';
break;
case 'php':
extension = 'php';
break;
default:
extension = 'txt';
break;
}
} else {
extension = 'txt';
language = 'unknown';
}
const gistFileName = `${authorIrcName}_code.${extension}`;
let postBody = {
description: `Code block on ${msg.guild.name} in channel ${channelName} from ${authorIrcName}`,
public: false,
files: {
}
};
postBody.files[gistFileName] = {
'content': codeDetails[2]
};
let gistOptions = {
url: 'https://api.github.com/gists',
headers: {
'Authorization': `token ${configuration.githubToken}`,
'User-Agent': 'discordIRCd'
},
method: 'POST',
json: postBody
};
request(gistOptions, function(error, response, body) {
if (error) {
console.log('Gist error:', error);
}
if (!error && response.statusCode === 201) {
console.log(body.html_url);
const gistMessage = `:${authorIrcName}!${msg.author.id}@whatever PRIVMSG #${channelName} :${body.html_url}\r\n`;
ircDetails[discordServerId].channels[channelName].joined.forEach(function(socketID) {
sendToIRC(discordServerId, gistMessage, socketID);
});
}
if (!error && response.statusCode !== 201) {
console.log('Something went wrong on the gist side of things:', response.statusCode);
}
});
}
}
let memberMentioned = false;
let memberDirectlyMentioned = false;
const ownGuildMember = discordClient.guilds.get(discordServerId).members.get(discordClient.user.id);
if (msg.mentions.roles.array().length > 0) {
ownGuildMember.roles.array().forEach(function(role) {
if (msg.isMentioned(role)) {
memberMentioned = true;
}
});
}
if (msg.mentions.everyone) {
memberMentioned = true;
}
// Only add it if the nickname is known. If it is undefined the client is not in the channel and will be notified through PM anyway.
if (memberMentioned && ownNickname) {
messageContent = `${ownNickname}: ${messageContent}`;
}
if (msg.mentions.users.array().length > 0) {
if (msg.isMentioned(ownGuildMember)) {
memberDirectlyMentioned = true;
}
}
// Only act on text channels and if the user has joined them in irc or if the user is mentioned in some capacity.
if (ircDetails[discordServerId].channels[channelName].joined.length > 0 || memberMentioned || memberDirectlyMentioned) {
// IRC does not handle newlines. So we split the message up per line and send them seperatly.
const messageArray = messageContent.split(/\r?\n/);
const attachmentArray = msg.attachments.array();
if (attachmentArray.length > 0) {
attachmentArray.forEach(function(attachment) {
const filename = attachment.filename;
const url = attachment.url;
const attachmentLine = `${filename}: ${url}`;
messageArray.push(attachmentLine);
});
}
messageArray.forEach(function(line) {
const messageTemplate = `:${authorIrcName}!${msg.author.id}@whatever PRIVMSG #${channelName} :`;
const messageTemplateLength = messageTemplate.length;
const remainingLength = maxLineLength - messageTemplateLength;
const matchRegex = new RegExp(`[\\s\\S]{1,${remainingLength}}`, 'g');
const linesArray = line.match(matchRegex) || [];
linesArray.forEach(function(sendLine) {
// Trying to prevent messages from irc echoing back and showing twice.
if (ircDetails[discordServerId].lastPRIVMSG.indexOf(sendLine) < 0) {
const lineToSend = parseDiscordLine(sendLine, discordServerId);
const message = `${messageTemplate}${lineToSend}\r\n`;
ircDetails[discordServerId].channels[channelName].joined.forEach(function(socketID) {
sendToIRC(discordServerId, message, socketID);
});
// Let's make people aware they are mentioned in channels they are not in.
if (memberMentioned || memberDirectlyMentioned) {
ircClients.forEach(function(socket) {
if (socket.discordid === discordServerId && ircDetails[discordServerId].channels[channelName].joined.indexOf(socket.ircid) === -1) {
const message = `:discordIRCd!notReallyA@User PRIVMSG discordIRCd :#${channelName}: ${lineToSend}\r\n`;
sendToIRC(discordServerId, message, socket.ircid);
}
});
}
}
});
});
}
}
if (ircClients.length > 0 && msg.channel.type === 'dm') {
const discordServerId = 'DMserver';
const authorDisplayName = msg.author.username;
const authorIsBot = msg.author.bot;
const authorDiscriminator = msg.author.discriminator;
const authorIrcName = ircNickname(authorDisplayName, authorIsBot, authorDiscriminator);
const recipientIsBot = msg.channel.recipient.bot;
const recipientDiscriminator = msg.channel.recipient.discriminator;
const recipient = ircNickname(msg.channel.recipient.username, recipientIsBot, recipientDiscriminator);
let ownNickname;
ircClients.forEach(function(socket) {
if (socket.discordid === discordServerId) {
ownNickname = socket.nickname;
}
});
let messageTemplate;
if (authorIrcName === ownNickname) {
messageTemplate = `:${authorIrcName}!${msg.author.id}@whatever PRIVMSG ${recipient} :`;
} else {
messageTemplate = `:${authorIrcName}!${msg.author.id}@whatever PRIVMSG ${ownNickname} :`;
}
// IRC does not handle newlines. So we split the message up per line and send them seperatly.
const messageArray = msg.content.split(/\r?\n/);
messageArray.forEach(function(line) {
const messageTemplateLength = messageTemplate.length;
const remainingLength = maxLineLength - messageTemplateLength;
const matchRegex = new RegExp(`[\\s\\S]{1,${remainingLength}}`, 'g');
const linesArray = line.match(matchRegex) || [];
linesArray.forEach(function(sendLine) {
// Trying to prevent messages from irc echoing back and showing twice.
if (ircDetails[discordServerId].lastPRIVMSG.indexOf(sendLine) < 0) {
const lineToSend = parseDiscordLine(sendLine, discordServerId);
const message = `${messageTemplate}${lineToSend}\r\n`;
sendToIRC(discordServerId, message);
}
});
});
const attachmentArray = msg.attachments.array();
if (attachmentArray.length > 0) {
attachmentArray.forEach(function(attachment) {
const filename = attachment.filename;
const url = attachment.url;
const attachmentLine = `${filename}: ${url}`;
const message = `${messageTemplate}${attachmentLine}\r\n`;
sendToIRC(discordServerId, message);
});
}
}
});
// Join command given, let's join the channel.
function joinCommand(channel, discordID, socketID) {
let members = '';
let memberListLines = [];
const nickname = ircDetails[discordID].ircDisplayName;
const memberlistTemplate = `:${configuration.ircServer.hostname} 353 ${nickname} @ #${channel} :`;
const memberlistTemplateLength = memberlistTemplate.length;
if (ircDetails[discordID].channels.hasOwnProperty(channel)) {
const channelProperties = ircDetails[discordID].channels[channel];
const channelContent = discordClient.channels.get(channelProperties.id);
ircDetails[discordID].channels[channel].joined.push(socketID);
ircDetails[discordID].channels[channel]['members'] = {};
const channelTopic = channelProperties.topic;
channelContent.members.array().forEach(function(member) {
const isBot = member.user.bot;
const discriminator = member.user.discriminator;
const displayMember = ircNickname(member.displayName, isBot, discriminator);
if (member.presence.status === 'online' ||
member.presence.status === 'idle' ||
member.presence.status === 'dnd' ||
(member.presence.status === 'offline' && configuration.showOfflineUsers)) {
ircDetails[discordID].channels[channel].members[displayMember] = {
discordName: member.displayName,
discordState: member.presence.status,
ircNick: displayMember,
id: member.id
};
const membersPlusDisplayMember = `${members} ${displayMember}`;
const newLineLenght = membersPlusDisplayMember.length;
const combinedLineLength = newLineLenght + memberlistTemplateLength;
if (combinedLineLength < maxLineLength) {
members = `${members} ${displayMember}`;
} else {
memberListLines.push(members);
members = displayMember;
}
}
});
memberListLines.push(members);
const joinMSG = `:${nickname} JOIN #${channel}\r\n`;
console.log(joinMSG);
sendToIRC(discordID, joinMSG, socketID);
// Setting the topic.
const topicMSG = `:${configuration.ircServer.hostname} 332 ${nickname} #${channel} :${channelTopic}\r\n`;
console.log(topicMSG);
sendToIRC(discordID, topicMSG, socketID);
const todayDate = new Date();
const seconds = todayDate.getTime() / 1000;
const topicMSG2 = `:${configuration.ircServer.hostname} 333 ${nickname} #${channel} noboyknows!orCares@whatever ${seconds}\r\n`;
sendToIRC(discordID, topicMSG2, socketID);
memberListLines.forEach(function(line) {
const memberListMSG = `${memberlistTemplate}${line}\r\n`;
console.log(memberListMSG);
sendToIRC(discordID, memberListMSG, socketID);
});
const endListMSG = `:${configuration.ircServer.hostname} 366 ${nickname} #${channel} :End of /NAMES list.\r\n`;
console.log(endListMSG);
sendToIRC(discordID, endListMSG, socketID);
const socketDetails = getSocketDetails(socketID);
if (socketDetails.awayNotify) {
for (let key in ircDetails[discordID].channels[channel].members) {
if (ircDetails[discordID].channels[channel].members.hasOwnProperty(key)) {
let member = ircDetails[discordID].channels[channel].members[key];
let nickname = member.ircNick;
switch (member.discordState) {
case 'idle':
sendToIRC(discordID, `:${nickname}!${member.id}@whatever AWAY :Idle\r\n`, socketID);
break;
case 'dnd':
sendToIRC(discordID, `:${nickname}!${member.id}@whatever AWAY :Do not disturb\r\n`, socketID);
break;
case 'offline':
if (configuration.showOfflineUsers) {
sendToIRC(discordID, `:${nickname}!${member.id}@whatever AWAY :Offline\r\n`, socketID);
}
break;
}
}
}
}
} else {
sendToIRC(discordID, `:${configuration.ircServer.hostname} 473 ${nickname} #${channel} :Cannot join channel\r\n`, socketID);
}
}
// List command, let's give back a list of channels.
function listCommand(discordID, ircID) {
if (discordID === 'DMserver') return;
const nickname = ircDetails[discordID].ircDisplayName;
const channels = discordClient.guilds.get(discordID).channels.array();
let listResponse = [`:${configuration.ircServer.hostname} 321 ${nickname} Channel :Users Name\r\n`];
channels.forEach(function(channel) {
if (channel.type === 'text') {
const channelname = channel.name,
memberCount = channel.members.array().length,
channeltopic = channel.topic;
const channelDetails = `:${configuration.ircServer.hostname} 322 ${nickname} #${channelname} ${memberCount} :${channeltopic}\r\n`;
listResponse.push(channelDetails);
}
});
const endlist = `:${configuration.ircServer.hostname} 323 ${nickname} :End of channel list.\r\n`;
listResponse.push(endlist);
listResponse.forEach(function(line) {
sendToIRC(discordID, line, ircID);
});
}
// Part command given, let's part the channel.
function partCommand(channel, discordID, ircID) {
const nickname = ircDetails[discordID].ircDisplayName;
if (ircDetails[discordID].channels.hasOwnProperty(channel)) {
// Let's clear the channel
const socketIndex = ircDetails[discordID].channels[channel].joined.indexOf(ircID);
if (socketIndex > -1) {
ircDetails[discordID].channels[channel].joined.splice(socketIndex, 1);
}
// If no other sockets are connected we clear the channel.
if (ircDetails[discordID].channels[channel].joined.length === 0) {
ircDetails[discordID].channels[channel].members = {};
}
sendToIRC(discordID, `:${nickname}!${discordClient.user.id}@whatever PART #${channel}\r\n`, ircID);
}
}
function getDiscordUserFromIRC(recipient, discordID) {
let returnmember;
if (discordID === 'DMserver') {
discordClient.users.array().forEach(function(user) {
const isBot = user.bot;
const discriminator = user.discriminator;
const displayMember = ircNickname(user.username, isBot, discriminator);
if (displayMember === recipient) {
returnmember = user;
}
});
} else {
discordClient.guilds.get(discordID).members.array().forEach(function(member) {
const isBot = member.user.bot;
const discriminator = member.user.discriminator;
const displayMember = ircNickname(member.displayName, isBot, discriminator);
if (displayMember === recipient) {
returnmember = member;
}
});
}
return returnmember;
}
//
// Irc Related functionality.
//
let ircServer = net.createServer(netOptions, function(socket) {
console.log('new socket');
socket.setEncoding('utf8');
ircClientCount++;
socket.ircid = ircClientCount;
socket.discordid = '';
socket.nickname = '';
socket.user = '';
socket.pongcount = 1;
socket.isCAPBlocked = false;
socket.authenticated = false;
socket.connectArray = [];
socket.awayNotify = false;
socket.on('error', function(error) {
console.log('Socket error: ', error);
socket.end();
});
socket.on('data', function(data) {
console.log('data:', data);
// Data can be multiple lines. Here we put each line in an array.
let dataArray = data.match(/.+/g);
dataArray.forEach(function(line) {
let parsedLine = parseMessage(line);
// Dealing with IRC v3.1 capability negotiation.
// http://ircv3.net/specs/core/capability-negotiation-3.1.html
// v3.2 is also available but does not seem to add anything we need.
if (parsedLine.command === 'CAP') {
const capSubCommand = parsedLine.params[0];
let nickname;
if (socket.nickname) {
nickname = socket.nickname;
} else {
nickname = '*';
}
switch (capSubCommand) {
case 'LS':
socket.isCAPBlocked = true;
socket.write(`:${configuration.ircServer.hostname} CAP ${nickname} LS :away-notify\r\n`);
break;
case 'LIST':
if (socket.awayNotify) {
socket.write(`:${configuration.ircServer.hostname} CAP ${nickname} LIST :away-notify\r\n`);
} else {
socket.write(`:${configuration.ircServer.hostname} CAP ${nickname} LIST :\r\n`);
}
break;
case 'REQ':
const requestedCapabilities = parsedLine.params[1];
if (requestedCapabilities === 'away-notify') {
socket.write(`:${configuration.ircServer.hostname} CAP ${nickname} ACK :away-notify\r\n`);
socket.awayNotify = true;
} else if (requestedCapabilities === '-away-notify') {
socket.write(`:${configuration.ircServer.hostname} CAP ${nickname} ACK :-away-notify\r\n`);
socket.awayNotify = false;
} else {
socket.write(`:${configuration.ircServer.hostname} CAP ${nickname} NAK :${requestedCapabilities}\r\n`);
}
socket.write(`:${configuration.ircServer.hostname} CAP ${nickname} ACK :${parsedLine.params[0]}\r\n`);
break;
case 'ACK':
// Not expecting this from a client at this point. However we'll leave it in here for future use.
break;
case 'END':
socket.isCAPBlocked = false;
// Now we check if the user was already authenticated and the whole welcome thing has been send.
if (socket.connectArray.length > 0) {
socket.connectArray.forEach(function(line) {
socket.write(line);
});
// And empty it.
socket.connectArray = [];
}
break;
default:
// We have no idea what we are dealing with. Inform the client.
socket.write(`:${configuration.ircServer.hostname} 410 * ${capSubCommand} :Invalid CAP command\r\n`);
break;
}
}
if (!socket.authenticated) {
switch (parsedLine.command) {
case 'PASS':
socket.discordid = parsedLine.params[0];
break;
case 'NICK':
socket.nickname = parsedLine.params[0];
break;
case 'USER':
// So different irc clients use different formats for user it seems.
// TODO: figure out how to properly parse this.
let username = parsedLine.params[0];
let usernameAlternative = parsedLine.params[3];
socket.user = username;
let nickname = socket.nickname;
// We are abusing some irc functionality here to add a tiny bit of security.
// The username the ircclient gives must match with that in the configuration.
// If the username is correct and the discordId can be found we are in bussiness.
if (username === configuration.ircServer.username || usernameAlternative === configuration.ircServer.username) {
// Now we are connected let's change the nickname first to whatever it is on discord.
if (socket.discordid === 'DMserver') {
const newuser = discordClient.user.username;
const discriminator = discordClient.user.discriminator;
const newNickname = ircNickname(newuser, false, discriminator);
ircDetails[socket.discordid]['discordDisplayName'] = newuser;
ircDetails[socket.discordid]['ircDisplayName'] = newNickname;
socket.user = newuser;
socket.nickname = newNickname;
socket.authenticated = true;
const connectArray = [
`:${nickname}!${discordClient.user.id}@whatever NICK ${newNickname}\r\n`,
`:${configuration.ircServer.hostname} 001 ${newNickname} :Welcome to the fake Internet Relay Chat Network ${newNickname}\r\n`,
`:${configuration.ircServer.hostname} 003 ${newNickname} :This server was created specifically for you\r\n`
];
connectArray.forEach(function(line) {
socket.write(line);
});
} else if (discordClient.guilds.get(socket.discordid)) {
// I am fairly certain there must be a simpler way to find out... but I haven't found it yet.
discordClient.guilds.get(socket.discordid).fetchMember(discordClient.user.id).then(function(guildMember) {
const newuser = guildMember.displayName;
const discriminator = discordClient.user.discriminator;
const newNickname = ircNickname(newuser, false, discriminator);
ircDetails[socket.discordid]['discordDisplayName'] = newuser;
ircDetails[socket.discordid]['ircDisplayName'] = newNickname;
socket.user = newuser;
socket.nickname = newNickname;
socket.authenticated = true;
//console.log(`:${configuration.ircServer.hostname} NOTICE Auth :*** Looking up your hostname...\r\n`);
const connectArray = [
`:${nickname}!${discordClient.user.id}@whatever NICK ${newNickname}\r\n`,
`:${configuration.ircServer.hostname} 001 ${newNickname} :Welcome to the fake Internet Relay Chat Network ${newNickname}\r\n`,
`:${configuration.ircServer.hostname} 003 ${newNickname} :This server was created specifically for you\r\n`
];
// If we are waiting on CAP negotiation we write the connection array to the socket and this will be processed once proper CAP END is received.
if (socket.isCAPBlocked) {
socket.connectArray = connectArray;
} else {
connectArray.forEach(function(line) {
socket.write(line);
});
}
});
} else {
// Things are not working out, let's end this.
console.log(`${nickname}: Failed to connect to ${socket.discordid}`)
socket.write(`:${configuration.ircServer.hostname} 464 ${nickname} :Failed to connect to ${socket.discordid}\r\n`);
socket.end();
}
} else {
// Things are not working out, let's end this.
console.log(`${nickname}: Invalid login, expected ${configuration.ircServer.username}, got ${username} (${usernameAlternative})`);
socket.write(`:${configuration.ircServer.hostname} 464 ${nickname} :Invalid login. Make sure your username matches the username set in the config.\r\n`);
socket.end();
}
break;
}
}
if (socket.authenticated && !socket.isCAPBlocked) {
switch (parsedLine.command) {
case 'JOIN':
const joinChannels = parsedLine.params[0].split(',');
joinChannels.forEach(function(channel) {
joinCommand(channel.substring(1), socket.discordid, socket.ircid);
});
break;
case 'PART':
const partChannels = parsedLine.params[0].split(',');
partChannels.forEach(function(channel) {
partCommand(channel.substring(1), socket.discordid, socket.ircid);
});
break;
case 'PRIVMSG':
const recipient = parsedLine.params[0];
if (recipient.startsWith('#')) {
const channelName = recipient.substring(1);
const sendLine = parseIRCLine(parsedLine.params[1], socket.discordid, channelName);
if (ircDetails[socket.discordid].lastPRIVMSG.length > 3) {
ircDetails[socket.discordid].lastPRIVMSG.shift();
}
ircDetails[socket.discordid].lastPRIVMSG.push(sendLine.trim());
discordClient.channels.get(ircDetails[socket.discordid].channels[channelName].id).sendMessage(sendLine);
} else if (recipient !== 'discordIRCd') {
const recipientUser = getDiscordUserFromIRC(recipient, socket.discordid);
const sendLine = parsedLine.params[1];
recipientUser.sendMessage(sendLine);
ircDetails[socket.discordid].lastPRIVMSG.push(sendLine.trim());
if (ircDetails[socket.discordid].lastPRIVMSG.length > 3) {
ircDetails[socket.discordid].lastPRIVMSG.shift();
}
if (socket.discordid !== 'DMserver') {
const messageTemplate = `:${socket.nickname}!${discordClient.user.id}@whatever PRIVMSG ${recipient} :PM Send: Note that replies will not arrive here but on the PM server\r\n`;
socket.write(messageTemplate);
}
if (ircDetails[socket.discordid].lastPRIVMSG.length > 3) {
ircDetails[socket.discordid].lastPRIVMSG.shift();
}
}
break;
case 'QUIT':
for (let channel in ircDetails[socket.discordid].channels) {
if (ircDetails[socket.discordid].channels.hasOwnProperty(channel) && ircDetails[socket.discordid].channels[channel].joined.indexOf(socket.ircid) > 0) {
const socketIndex = ircDetails[socket.discordid].channels[channel].joined.indexOf(socket.ircid);
if (socketIndex > -1) {
ircDetails[socket.discordid].channels[channel].joined.splice(socketIndex, 1);
}
}
}
socket.end();
break;
case 'PING':
socket.write(`:${configuration.ircServer.hostname} PONG ${configuration.ircServer.hostname} :${socket.pongcount}\r\n`);
socket.pongcount = socket.pongcount + 1;
break;
case 'LIST':
listCommand(socket.discordid, socket.ircid);
break;
case 'WHOIS':
const whoisUser = parsedLine.params[0].trim();
const userID = ircDetails[socket.discordid].members[whoisUser];
break;
}
}
});
});
ircClients.push(socket);
// When a client is ended we remove it from the list of clients.
socket.on('end', function() {
ircClients.splice(ircClients.indexOf(socket), 1);
});
});
// Function for sending messages to the connect irc clients
function sendToIRC(discordServerId, line, ircid = 0) {
ircClients.forEach(function(socket) {
if (socket.discordid === discordServerId && socket.authenticated && !socket.isCAPBlocked && (ircid === 0 || ircid === socket.ircid)) {
socket.write(line);
}
});
}
function
|
(socketID) {
let socketDetails = {};
ircClients.forEach(function(socket) {
if (socket.ircid === socketID) {
socketDetails = {
ircid: socket.ircid,
discordid: socket.discordid,
nickname: socket.nickname,
user: socket.user,
isCAPBlocked: socket.isCAPBlocked,
authenticated: socket.authenticated,
awayNotify: socket.awayNotify
};
}
});
return socketDetails;
}
// Sending notices to all connected clients.
function sendGeneralNotice(noticeText) {
ircClients.forEach(function(socket) {
const notice = `:${configuration.ircServer.hostname} NOTICE ${socket.nickname} :${noticeText}\r\n`;
socket.write(notice);
});
}
// We want to be able to kill the process without having to deal with leftover connections.
process.on('SIGINT', function() {
console.log('\nGracefully shutting down from SIGINT (Ctrl-C)');
sendGeneralNotice('IRC server has been shut down through SIGINT');
ircClients.forEach(function(socket) {
socket.end();
});
discordClient.destroy();
ircServer.close();
process.exit();
});
|
getSocketDetails
|
pod_repository.go
|
// Code generated by counterfeiter. DO NOT EDIT.
package fake
import (
"context"
"sync"
"code.cloudfoundry.org/cf-k8s-controllers/api/actions"
"code.cloudfoundry.org/cf-k8s-controllers/api/authorization"
"code.cloudfoundry.org/cf-k8s-controllers/api/repositories"
)
type PodRepository struct {
ListPodStatsStub func(context.Context, authorization.Info, repositories.ListPodStatsMessage) ([]repositories.PodStatsRecord, error)
listPodStatsMutex sync.RWMutex
listPodStatsArgsForCall []struct {
arg1 context.Context
arg2 authorization.Info
arg3 repositories.ListPodStatsMessage
}
listPodStatsReturns struct {
result1 []repositories.PodStatsRecord
result2 error
}
listPodStatsReturnsOnCall map[int]struct {
result1 []repositories.PodStatsRecord
result2 error
}
invocations map[string][][]interface{}
invocationsMutex sync.RWMutex
}
func (fake *PodRepository) ListPodStats(arg1 context.Context, arg2 authorization.Info, arg3 repositories.ListPodStatsMessage) ([]repositories.PodStatsRecord, error) {
fake.listPodStatsMutex.Lock()
ret, specificReturn := fake.listPodStatsReturnsOnCall[len(fake.listPodStatsArgsForCall)]
fake.listPodStatsArgsForCall = append(fake.listPodStatsArgsForCall, struct {
arg1 context.Context
arg2 authorization.Info
arg3 repositories.ListPodStatsMessage
}{arg1, arg2, arg3})
stub := fake.ListPodStatsStub
fakeReturns := fake.listPodStatsReturns
fake.recordInvocation("ListPodStats", []interface{}{arg1, arg2, arg3})
fake.listPodStatsMutex.Unlock()
if stub != nil {
return stub(arg1, arg2, arg3)
}
if specificReturn {
return ret.result1, ret.result2
}
return fakeReturns.result1, fakeReturns.result2
}
func (fake *PodRepository) ListPodStatsCallCount() int {
fake.listPodStatsMutex.RLock()
defer fake.listPodStatsMutex.RUnlock()
return len(fake.listPodStatsArgsForCall)
}
func (fake *PodRepository) ListPodStatsCalls(stub func(context.Context, authorization.Info, repositories.ListPodStatsMessage) ([]repositories.PodStatsRecord, error)) {
fake.listPodStatsMutex.Lock()
defer fake.listPodStatsMutex.Unlock()
fake.ListPodStatsStub = stub
}
func (fake *PodRepository) ListPodStatsArgsForCall(i int) (context.Context, authorization.Info, repositories.ListPodStatsMessage) {
fake.listPodStatsMutex.RLock()
defer fake.listPodStatsMutex.RUnlock()
argsForCall := fake.listPodStatsArgsForCall[i]
return argsForCall.arg1, argsForCall.arg2, argsForCall.arg3
}
func (fake *PodRepository) ListPodStatsReturns(result1 []repositories.PodStatsRecord, result2 error) {
fake.listPodStatsMutex.Lock()
defer fake.listPodStatsMutex.Unlock()
fake.ListPodStatsStub = nil
|
fake.listPodStatsReturns = struct {
result1 []repositories.PodStatsRecord
result2 error
}{result1, result2}
}
func (fake *PodRepository) ListPodStatsReturnsOnCall(i int, result1 []repositories.PodStatsRecord, result2 error) {
fake.listPodStatsMutex.Lock()
defer fake.listPodStatsMutex.Unlock()
fake.ListPodStatsStub = nil
if fake.listPodStatsReturnsOnCall == nil {
fake.listPodStatsReturnsOnCall = make(map[int]struct {
result1 []repositories.PodStatsRecord
result2 error
})
}
fake.listPodStatsReturnsOnCall[i] = struct {
result1 []repositories.PodStatsRecord
result2 error
}{result1, result2}
}
func (fake *PodRepository) Invocations() map[string][][]interface{} {
fake.invocationsMutex.RLock()
defer fake.invocationsMutex.RUnlock()
fake.listPodStatsMutex.RLock()
defer fake.listPodStatsMutex.RUnlock()
copiedInvocations := map[string][][]interface{}{}
for key, value := range fake.invocations {
copiedInvocations[key] = value
}
return copiedInvocations
}
func (fake *PodRepository) recordInvocation(key string, args []interface{}) {
fake.invocationsMutex.Lock()
defer fake.invocationsMutex.Unlock()
if fake.invocations == nil {
fake.invocations = map[string][][]interface{}{}
}
if fake.invocations[key] == nil {
fake.invocations[key] = [][]interface{}{}
}
fake.invocations[key] = append(fake.invocations[key], args)
}
var _ actions.PodRepository = new(PodRepository)
| |
scheduler.go
|
// Copyright 2021 PingCAP, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// See the License for the specific language governing permissions and
|
package owner
import (
"github.com/pingcap/ticdc/cdc/model"
"github.com/pingcap/ticdc/pkg/orchestrator"
)
// scheduler is an interface for scheduling tables.
// Since in our design, we do not record checkpoints per table,
// how we calculate the global watermarks (checkpoint-ts and resolved-ts)
// is heavily coupled with how tables are scheduled.
// That is why we have a scheduler interface that also reports the global watermarks.
type scheduler interface {
// Tick is called periodically from the owner, and returns
// updated global watermarks.
Tick(
state *orchestrator.ChangefeedReactorState,
currentTables []model.TableID,
captures map[model.CaptureID]*model.CaptureInfo,
) (newCheckpointTs, newResolvedTs model.Ts, err error)
// MoveTable is used to trigger manual table moves.
MoveTable(tableID model.TableID, target model.CaptureID)
// Rebalance is used to trigger manual workload rebalances.
Rebalance()
}
|
// limitations under the License.
|
client.py
|
#coding=utf-8
#!/bin/env python
import os
import base64
import socket
import numpy
import time
m_serv_ip = '10.230.147.31'
m_serv_port = 9999
def init_socket(serv_ip, serv_port):
""""""
ip_port = (serv_ip, serv_port)
sk = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0) # TCP
#sk = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, 0) # UDP
sk.connect(ip_port)
sk.settimeout(50)
return sk
def send_socket(sk, b64, name):
""""""
len_content = len(b64) + len(name)
sk.sendall(bytes(str(len_content).zfill(16), encoding='utf-8')) # 发送头部
sk.sendall(bytes(name, encoding='utf-8'))
#sk.sendall(str(len_content).zfill(16)) # 发送头部
#sk.sendall(name)
sk.sendall(b64) # 发送内容
sk.close()
def img_to_b64(img_path):
"""显示一副图片"""
assert os.path.isfile(img_path)
with open(img_path, 'rb') as f:
img = f.read()
b64 = base64.b64encode(img)
return b64
def get_img_names(img_dir):
""""""
assert os.path.isdir(img_dir)
names_all = os.listdir(img_dir)
names = [name for name in names_all if name.endswith('.jpg')]
print('目录 {0} 下文件总数: {1}, 图片总数: {2}'.format(img_dir, len(names_all), len(names)))
return names
def send_batch(img_dir, img_names, start_idx, batch_num=10):
"""显示指定目录下的所有图片"""
global m_serv_ip
global m_serv_
|
t0 = time.clock()
img_names = get_img_names(img_dir)
num_img = len(img_names)
num_finish = 0 # 已完成个数
start_idx = 0 # batch起始索引号
num_batch = 0
while num_finish < num_img:
max_num = 0
num_left = num_img - num_finish
if num_left < batch_size:
max_num = num_left
else:
max_num = batch_size
send_batch(img_dir, img_names, start_idx, max_num)
start_idx += max_num
num_finish += max_num
num_batch += 1
if num_batch >= max_batch:
break
print('client finish, time elapsed: {0}'.format(time.clock() - t0))
if __name__ == '__main__':
client('../data/problem3/train', batch_size=20, max_batch=10000)
|
port
t0 = time.clock()
t1 = time.clock()
for cnt, img_name in enumerate(img_names[start_idx: start_idx + batch_num]):
img_path = os.path.join(img_dir, img_name)
b64 = img_to_b64(img_path) # 获得b64编码
sk = init_socket(m_serv_ip, m_serv_port)
send_socket(sk, b64, img_name.rstrip('.jpg')) # 发送数据
t2 = time.clock()
print('cnt {0} finish, time elapsed: {1}, total elapsed: {2}'.format(cnt, t2 - t1, t2 - t0))
t1 = t2
print('all finished, num send: {0}, time elapsed: {1}'.format(len(img_names), time.clock() - t0))
#sk.close()
def client(img_dir, batch_size, max_batch):
""""""
assert os.path.isdir(img_dir)
|
settings_table.rs
|
// Copyright 2020 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::any::Any;
use std::sync::Arc;
use common_datablocks::DataBlock;
use common_datavalues::prelude::*;
use common_exception::Result;
use common_planners::Part;
use common_planners::ReadDataSourcePlan;
use common_planners::ScanPlan;
use common_planners::Statistics;
use common_streams::DataBlockStream;
use common_streams::SendableDataBlockStream;
use crate::datasources::Table;
use crate::sessions::DatafuseQueryContextRef;
pub struct SettingsTable {
schema: DataSchemaRef,
}
impl SettingsTable {
pub fn create() -> Self {
SettingsTable {
schema: DataSchemaRefExt::create(vec![
DataField::new("name", DataType::Utf8, false),
DataField::new("value", DataType::Utf8, false),
DataField::new("default_value", DataType::Utf8, false),
DataField::new("description", DataType::Utf8, false),
]),
}
}
}
#[async_trait::async_trait]
impl Table for SettingsTable {
fn name(&self) -> &str {
"settings"
}
fn engine(&self) -> &str {
"SystemSettings"
}
fn as_any(&self) -> &dyn Any {
self
}
fn schema(&self) -> Result<DataSchemaRef> {
Ok(self.schema.clone())
}
fn is_local(&self) -> bool {
true
}
fn
|
(
&self,
_ctx: DatafuseQueryContextRef,
scan: &ScanPlan,
_partitions: usize,
) -> Result<ReadDataSourcePlan> {
Ok(ReadDataSourcePlan {
db: "system".to_string(),
table: self.name().to_string(),
table_id: scan.table_id,
table_version: scan.table_version,
schema: self.schema.clone(),
parts: vec![Part {
name: "".to_string(),
version: 0,
}],
statistics: Statistics::default(),
description: "(Read from system.settings table)".to_string(),
scan_plan: Arc::new(scan.clone()),
remote: false,
})
}
async fn read(
&self,
ctx: DatafuseQueryContextRef,
_source_plan: &ReadDataSourcePlan,
) -> Result<SendableDataBlockStream> {
let settings = ctx.get_settings();
let mut names: Vec<String> = vec![];
let mut values: Vec<String> = vec![];
let mut default_values: Vec<String> = vec![];
let mut descs: Vec<String> = vec![];
for setting in settings.iter() {
if let DataValue::Struct(vals) = setting {
names.push(format!("{:?}", vals[0]));
values.push(format!("{:?}", vals[1]));
default_values.push(format!("{:?}", vals[2]));
descs.push(format!("{:?}", vals[3]));
}
}
let names: Vec<&str> = names.iter().map(|x| x.as_str()).collect();
let values: Vec<&str> = values.iter().map(|x| x.as_str()).collect();
let default_values: Vec<&str> = default_values.iter().map(|x| x.as_str()).collect();
let descs: Vec<&str> = descs.iter().map(|x| x.as_str()).collect();
let block = DataBlock::create_by_array(self.schema.clone(), vec![
Series::new(names),
Series::new(values),
Series::new(default_values),
Series::new(descs),
]);
Ok(Box::pin(DataBlockStream::create(
self.schema.clone(),
None,
vec![block],
)))
}
}
|
read_plan
|
main.rs
|
use reqwest;
#[macro_use]
pub extern crate integra8;
main_test! {
console_output: integra8_serde_formatter::SerdeFormatter,
parameters : {
#[structopt(long = "target-url", default_value = "https://httpbin.org/ip")]
pub url: String,
|
#[integration_test]
async fn httpbin_should_reply_200_ok(ctx : crate::ExecutionContext) {
#[cfg(feature = "tokio-runtime")]
let response = reqwest::get(&ctx.parameters.app.url).await.unwrap();
// reqwest does not support async-std, so blocking must be used instead.
// Its recommended to use async for these types of tests, as
// integra8 will run other tests while this test waits for a response
#[cfg(feature = "async-std-runtime")]
let response = reqwest::blocking::get(&ctx.parameters.app.url).unwrap();
assert_eq!(response.status(), 200, "Expected http 200 response");
}
|
}
}
|
models.py
|
from decimal import ROUND_DOWN, Decimal
from django.db import models
from lorikeet.exceptions import PaymentError
from lorikeet.models import (
Adjustment,
DeliveryAddress,
LineItem,
Payment,
PaymentMethod,
)
AUSTRALIAN_STATES = (
("NSW", "New South Wales"),
("VIC", "Victoria"),
("QLD", "Queensland"),
("WA", "Western Australia"),
("SA", "South Australia"),
("TAS", "Tasmania"),
("ACT", "Australian Capital Territory"),
("NT", "Northern Territory"),
)
class Product(models.Model):
name = models.CharField(max_length=255)
unit_price = models.DecimalField(max_digits=7, decimal_places=2)
class MyLineItem(LineItem):
product = models.ForeignKey(Product, on_delete=models.PROTECT)
quantity = models.PositiveSmallIntegerField()
def get_total(self):
return self.quantity * self.product.unit_price
class AustralianDeliveryAddress(DeliveryAddress):
addressee = models.CharField(max_length=255)
address = models.TextField()
suburb = models.CharField(max_length=255)
state = models.CharField(max_length=3, choices=AUSTRALIAN_STATES)
postcode = models.CharField(max_length=4)
class PipeCard(PaymentMethod):
card_id = models.CharField(max_length=30)
def make_payment(self, order, amount):
if self.card_id.endswith("9"):
raise PaymentError("Insufficient funds")
return PipePayment.objects.create(method=self, amount=amount)
class PipePayment(Payment):
amount = models.DecimalField(max_digits=7, decimal_places=2)
class CartDiscount(Adjustment):
percentage = models.PositiveSmallIntegerField()
def
|
(self, subtotal):
if not isinstance(subtotal, Decimal):
raise TypeError(subtotal)
discount = -subtotal * self.percentage / 100
return discount.quantize(Decimal(".01"), rounding=ROUND_DOWN)
|
get_total
|
WeatherReader.py
|
import json
import requests
import os
from dotenv import load_dotenv
import datetime
load_dotenv()
batch_size = 130
def get_raw_weather(ids, start_date, end_date):
request_ids = '&stationid='.join(id for id in ids)
return requests.get('https://www.ncdc.noaa.gov/cdo-web/api/v2/data?datasetid=GHCND&stationid=' + request_ids +
'&startdate=' + start_date + '&enddate=' + end_date + '&limit=1000', headers={'token': os.getenv('NOAA_TOKEN')})
def get_weather(stations, start_date, end_date):
raw_weather = get_raw_weather([station['id'] for station in stations], start_date, end_date)
if raw_weather.status_code == 429:
print('No requests left!')
return None
if raw_weather.status_code != 200:
print('Some problems, status code {}'.format(raw_weather.status_code))
return None
raw_weather = raw_weather.json()
if(raw_weather == {}):
return {}
processed_weather = {station['id']: {'latitude': station['latitude'], 'longitude': station['longitude'], 'elevation': station['elevation']} for station in stations}
for measurement in raw_weather['results']:
processed_weather[measurement['station']][measurement['datatype']] = measurement['value']
return {station: measurements for station, measurements in processed_weather.items() if len(measurements) > 3}
def get_weather_for_all_stations(date):
offset = 0
all_weather = {}
with open('StationsForMeasurement.json', 'r') as read_file:
stations = json.load(read_file)
with open('Measurements/' + date + '.json', 'w') as write_file:
write_file.truncate(0)
while offset < len(stations):
try:
weather = get_weather(stations[offset: offset + batch_size], date, date)
if weather == None:
return False
all_weather.update(weather)
except Exception as e:
print(e)
if type(e) == KeyboardInterrupt:
exit()
offset += batch_size
print(str(round(min(offset / len(stations) * 100, 100), 1)) + "% complete")
json.dump(all_weather, write_file, indent=2)
write_file.close()
read_file.close()
return True
def get_weather_ALAP(start_date):
can_get_more = True
|
print('Processing ' + cur_date.strftime('%Y-%m-%d'))
can_get_more = get_weather_for_all_stations(cur_date.strftime('%Y-%m-%d'))
cur_date += datetime.timedelta(days=-1)
get_weather_ALAP('2019-02-13')
|
cur_date = datetime.datetime.strptime(start_date, '%Y-%m-%d')
while can_get_more:
|
config.rs
|
//!
//! # Main Configuration file
//!
//! Contains contexts, profiles
//!
use std::env;
use std::fs::read_to_string;
use std::io::Error as IoError;
use std::io::ErrorKind;
use std::io::Write;
use std::path::{Path, PathBuf};
use std::collections::HashMap;
use std::fs::File;
use std::fs::create_dir_all;
use thiserror::Error;
use tracing::debug;
use dirs::home_dir;
use serde::Deserialize;
use serde::Serialize;
use fluvio_types::defaults::{CLI_CONFIG_PATH};
use crate::{FluvioConfig, FluvioError};
#[derive(Error, Debug)]
pub enum ConfigError {
#[error(transparent)]
ConfigFileError {
#[from]
source: IoError,
},
#[error("Failed to deserialize Fluvio config")]
TomlError {
#[from]
source: toml::de::Error,
},
#[error("Config has no active profile")]
NoActiveProfile,
#[error("No cluster config for profile {profile}")]
NoClusterForProfile { profile: String },
}
pub struct ConfigFile {
path: PathBuf,
config: Config,
}
impl ConfigFile {
fn new(path: PathBuf, config: Config) -> Self {
Self { path, config }
}
/// create default profile
pub fn default_config() -> Result<Self, IoError> {
let path = Self::default_file_path(None)?;
Ok(Self {
path,
config: Config::new(),
})
}
/// load from default location if not found, create new one
pub fn load_default_or_new() -> Result<Self, IoError> {
match Self::load(None) {
Ok(config_file) => Ok(config_file),
Err(err) => {
// if doesn't exist, we create new profile
debug!("profile can't be loaded, creating new one: {}", err);
ConfigFile::default_config()
}
}
}
/// try to load from default locations
pub fn load(optional_path: Option<String>) -> Result<Self, FluvioError> {
let path = Self::default_file_path(optional_path)
.map_err(|source| ConfigError::ConfigFileError { source })?;
Self::from_file(path)
}
/// read from file
fn from_file<T: AsRef<Path>>(path: T) -> Result<Self, FluvioError> {
let path_ref = path.as_ref();
let file_str: String =
read_to_string(path_ref).map_err(|source| ConfigError::ConfigFileError { source })?;
let config =
toml::from_str(&file_str).map_err(|source| ConfigError::TomlError { source })?;
Ok(Self::new(path_ref.to_owned(), config))
}
/// find default path where config is stored. precedent is:
/// 1) supplied path
/// 2) environment variable in FLV_PROFILE_PATH
/// 3) home directory ~/.fluvio/config
fn default_file_path(path: Option<String>) -> Result<PathBuf, IoError> {
path.map(|p| Ok(PathBuf::from(p))).unwrap_or_else(|| {
env::var("FLV_PROFILE_PATH")
.map(|p| Ok(PathBuf::from(p)))
.unwrap_or_else(|_| {
if let Some(mut profile_path) = home_dir() {
profile_path.push(CLI_CONFIG_PATH);
profile_path.push("config");
Ok(profile_path)
} else {
Err(IoError::new(
ErrorKind::InvalidInput,
"can't get profile directory",
))
}
})
})
}
/// Return a reference to the internal Config
pub fn config(&self) -> &Config {
&self.config
}
/// Return a mutable reference to the internal Config
pub fn mut_config(&mut self) -> &mut Config {
&mut self.config
}
// save to file
pub fn save(&self) -> Result<(), FluvioError> {
create_dir_all(self.path.parent().unwrap())
.map_err(|source| ConfigError::ConfigFileError { source })?;
self.config
.save_to(&self.path)
.map_err(|source| ConfigError::ConfigFileError { source })?;
Ok(())
}
}
pub const LOCAL_PROFILE: &str = "local";
const CONFIG_VERSION: &str = "2.0";
#[derive(Debug, Default, PartialEq, Serialize, Deserialize)]
pub struct Config {
version: String,
current_profile: Option<String>,
profile: HashMap<String, Profile>,
cluster: HashMap<String, FluvioConfig>,
client_id: Option<String>,
}
impl Config {
pub fn new() -> Self {
Self {
version: CONFIG_VERSION.to_owned(),
..Default::default()
}
}
/// create new config with a single local cluster
pub fn new_with_local_cluster(domain: String) -> Self {
let cluster = FluvioConfig::new(domain);
let mut config = Self::new();
config.cluster.insert(LOCAL_PROFILE.to_owned(), cluster);
let profile_name = LOCAL_PROFILE.to_owned();
let local_profile = Profile::new(profile_name.clone());
config.profile.insert(profile_name.clone(), local_profile);
config.set_current_profile(&profile_name);
config
}
/// add new cluster
pub fn add_cluster(&mut self, cluster: FluvioConfig, name: String) {
self.cluster.insert(name, cluster);
}
pub fn add_profile(&mut self, profile: Profile, name: String) {
self.profile.insert(name, profile);
}
// save to file
fn save_to<T: AsRef<Path>>(&self, path: T) -> Result<(), IoError> {
let path_ref = path.as_ref();
debug!("saving config: {:#?} to: {:#?}", self, path_ref);
let toml =
toml::to_vec(self).map_err(|err| IoError::new(ErrorKind::Other, format!("{}", err)))?;
let mut file = File::create(path_ref)?;
file.write_all(&toml)
}
pub fn version(&self) -> &str {
&self.version
}
/// current profile
pub fn current_profile_name(&self) -> Option<&str> {
self.current_profile.as_ref().map(|c| c.as_ref())
}
/// set current profile, if profile doesn't exists return false
pub fn set_current_profile(&mut self, profile_name: &str) -> bool {
if self.profile.contains_key(profile_name) {
self.current_profile = Some(profile_name.to_owned());
true
} else {
false
}
}
/// delete profile
pub fn delete_profile(&mut self, profile_name: &str) -> bool {
if self.profile.remove(profile_name).is_some() {
if let Some(old_profile) = &self.current_profile {
// check if it same as current profile, then remove it
if profile_name == old_profile {
self.current_profile = None;
}
}
true
} else {
false
}
}
/// Deletes the named cluster, whether it is being used or not.
///
/// You may want to check if the named cluster is active or not using
/// `delete_cluster_check`. Otherwise, you may remove a cluster that
/// is being used by the active profile.
///
/// # Example
///
/// ```
/// # use fluvio::FluvioConfig;
/// # use fluvio::config::{Config, Profile};
/// let mut config = Config::new();
/// let cluster = FluvioConfig::new("https://cloud.fluvio.io".to_string());
/// config.add_cluster(cluster, "fluvio-cloud".to_string());
/// let profile = Profile::new("fluvio-cloud".to_string());
/// config.add_profile(profile, "fluvio-cloud".to_string());
///
/// config.delete_cluster("fluvio-cloud").unwrap();
/// assert!(config.cluster("fluvio-cloud").is_none());
/// ```
pub fn delete_cluster(&mut self, cluster_name: &str) -> Option<FluvioConfig> {
self.cluster.remove(cluster_name)
}
/// Checks whether it's safe to delete the named cluster
///
/// If there are any profiles that reference the named cluster,
/// they are considered conflicts and the cluster is unsafe to delete.
/// When conflicts exist, the conflicting profile names are returned
/// in the `Err()` return value.
///
/// If there are no profile conflicts, this returns with `Ok(())`.
///
/// # Example
///
/// ```
/// # use fluvio::FluvioConfig;
/// # use fluvio::config::{Config, Profile};
/// let mut config = Config::new();
/// let cluster = FluvioConfig::new("https://cloud.fluvio.io".to_string());
/// config.add_cluster(cluster, "fluvio-cloud".to_string());
/// let profile = Profile::new("fluvio-cloud".to_string());
/// config.add_profile(profile, "fluvio-cloud".to_string());
///
/// let conflicts = config.delete_cluster_check("fluvio-cloud").unwrap_err();
/// assert_eq!(conflicts, vec!["fluvio-cloud"]);
/// ```
pub fn delete_cluster_check(&mut self, cluster_name: &str) -> Result<(), Vec<&str>> {
// Find all profiles that reference the named cluster
let conflicts: Vec<_> = self
.profile
.iter()
.filter(|(_, profile)| &*profile.cluster == cluster_name)
.map(|(name, _)| &**name)
.collect();
if !conflicts.is_empty() {
return Err(conflicts);
}
Ok(())
}
/// Returns a reference to the current Profile if there is one.
pub fn current_profile(&self) -> Result<&Profile, FluvioError> {
let profile = self
.current_profile
.as_ref()
.and_then(|p| self.profile.get(p))
.ok_or(ConfigError::NoActiveProfile)?;
Ok(profile)
}
/// Returns a mutable reference to the current Profile if there is one.
pub fn profile_mut(&mut self, profile_name: &str) -> Option<&mut Profile> {
self.profile.get_mut(profile_name)
}
/// Returns the FluvioConfig belonging to the current profile.
pub fn current_cluster(&self) -> Result<&FluvioConfig, FluvioError> {
let profile = self.current_profile()?;
let maybe_cluster = self.cluster.get(&profile.cluster);
let cluster = maybe_cluster.ok_or_else(|| {
let profile = profile.cluster.clone();
ConfigError::NoClusterForProfile { profile }
})?;
Ok(cluster)
}
/// Returns the FluvioConfig belonging to the named profile.
pub fn cluster_with_profile(&self, profile_name: &str) -> Option<&FluvioConfig> {
self.profile
.get(profile_name)
.and_then(|profile| self.cluster.get(&profile.cluster))
}
/// Returns a reference to the named FluvioConfig.
pub fn cluster(&self, cluster_name: &str) -> Option<&FluvioConfig> {
self.cluster.get(cluster_name)
}
/// Returns a mutable reference to the named FluvioConfig.
pub fn cluster_mut(&mut self, cluster_name: &str) -> Option<&mut FluvioConfig> {
self.cluster.get_mut(cluster_name)
}
/// look up replica config
/// this will iterate and find all configuration that can resolve config
/// 1) match all config that matches criteria including asterik
/// 2) apply in terms of precedent
pub fn resolve_replica_config(&self, _topic_name: &str, _partition: i32) -> Replica {
/*
for (key, val) in self.topic.iter() {
println!("key: {:#?}, value: {:#?}",key,val);
}
*/
Replica::default()
}
}
#[derive(Debug, PartialEq, Serialize, Deserialize)]
pub struct Topic {
replica: HashMap<String, String>,
}
#[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize)]
pub struct Profile {
pub cluster: String,
pub topic: Option<String>,
pub partition: Option<i32>,
}
impl Profile {
pub fn new(cluster: String) -> Self {
Self {
cluster,
..Default::default()
}
}
pub fn set_cluster(&mut self, cluster: String) {
self.cluster = cluster;
}
}
#[derive(Debug, Default, PartialEq, Serialize, Deserialize)]
pub struct Replica {
pub max_bytes: Option<i32>,
pub isolation: Option<String>,
}
#[cfg(test)]
pub mod test {
use super::*;
use std::path::PathBuf;
use std::env::temp_dir;
use crate::config::{TlsPolicy, TlsConfig, TlsCerts};
#[test]
fn test_default_path_arg() {
assert_eq!(
ConfigFile::default_file_path(Some("/user1/test".to_string())).expect("file"),
PathBuf::from("/user1/test")
);
}
//#[test]
#[allow(unused)]
fn test_default_path_env() {
env::set_var("FLV_PROFILE_PATH", "/user2/config");
assert_eq!(
ConfigFile::default_file_path(None).expect("file"),
PathBuf::from("/user2/config")
);
env::remove_var("FLV_PROFILE_PATH");
}
#[test]
fn test_default_path_home() {
let mut path = home_dir().expect("home dir must exist");
path.push(CLI_CONFIG_PATH);
path.push("config");
assert_eq!(ConfigFile::default_file_path(None).expect("file"), path);
}
/// test basic reading
#[test]
fn test_config() {
|
let config = conf_file.mut_config();
assert_eq!(config.version(), "1.0");
assert_eq!(config.current_profile_name().unwrap(), "local");
let profile = config.current_profile().expect("profile should exists");
assert_eq!(profile.cluster, "local");
assert!(!config.set_current_profile("dummy"));
assert!(config.set_current_profile("local2"));
assert_eq!(config.current_profile_name().unwrap(), "local2");
let cluster = config.current_cluster().expect("cluster should exist");
assert_eq!(cluster.addr, "127.0.0.1:9003");
}
/// test TOML save generation
#[test]
fn test_tls_save() {
let mut config = Config::new_with_local_cluster("localhost:9003".to_owned());
let inline_tls_config = TlsConfig::Inline(TlsCerts {
key: "ABCDEFF".to_owned(),
cert: "JJJJ".to_owned(),
ca_cert: "XXXXX".to_owned(),
domain: "my_domain".to_owned(),
});
println!("temp: {:#?}", temp_dir());
config.cluster_mut(LOCAL_PROFILE).unwrap().tls = inline_tls_config.into();
config
.save_to(temp_dir().join("inline.toml"))
.expect("save should succeed");
config.cluster_mut(LOCAL_PROFILE).unwrap().tls = TlsPolicy::Disabled;
config
.save_to(temp_dir().join("noverf.toml"))
.expect("save should succeed");
}
#[test]
fn test_set_tls() {
let mut conf_file = ConfigFile::load(Some("test-data/profiles/config.toml".to_owned()))
.expect("parse failed");
let config = conf_file.mut_config();
config.set_current_profile("local3");
config
.save_to("/tmp/test_config.toml")
.expect("save should succeed");
let update_conf_file =
ConfigFile::load(Some("/tmp/test_config.toml".to_owned())).expect("parse failed");
assert_eq!(
update_conf_file.config().current_profile_name().unwrap(),
"local3"
);
}
/*
#[test]
fn test_topic_config() {
let conf_file = ConfigFile::load(Some("test-data/profiles/config.toml".to_owned())).expect("parse failed");
let config = conf_file.config().resolve_replica_config("test3",0);
}
*/
#[test]
fn test_local_cluster() {
let config = Config::new_with_local_cluster("localhost:9003".to_owned());
assert_eq!(config.current_profile_name().unwrap(), "local");
let cluster = config.current_cluster().expect("cluster should exists");
assert_eq!(cluster.addr, "localhost:9003");
}
}
|
// test read & parse
let mut conf_file = ConfigFile::load(Some("test-data/profiles/config.toml".to_owned()))
.expect("parse failed");
|
_ddos_protection_plans_operations.py
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class DdosProtectionPlansOperations(object):
"""DdosProtectionPlansOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2019_07_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def _delete_initial(
self,
resource_group_name, # type: str
ddos_protection_plan_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-07-01"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'ddosProtectionPlanName': self._serialize.url("ddos_protection_plan_name", ddos_protection_plan_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ddosProtectionPlans/{ddosProtectionPlanName}'} # type: ignore
def begin_delete(
self,
resource_group_name, # type: str
ddos_protection_plan_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Deletes the specified DDoS protection plan.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param ddos_protection_plan_name: The name of the DDoS protection plan.
:type ddos_protection_plan_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
ddos_protection_plan_name=ddos_protection_plan_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'ddosProtectionPlanName': self._serialize.url("ddos_protection_plan_name", ddos_protection_plan_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ddosProtectionPlans/{ddosProtectionPlanName}'} # type: ignore
def get(
self,
resource_group_name, # type: str
ddos_protection_plan_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.DdosProtectionPlan"
"""Gets information about the specified DDoS protection plan.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param ddos_protection_plan_name: The name of the DDoS protection plan.
:type ddos_protection_plan_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: DdosProtectionPlan, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_07_01.models.DdosProtectionPlan
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.DdosProtectionPlan"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-07-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'ddosProtectionPlanName': self._serialize.url("ddos_protection_plan_name", ddos_protection_plan_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('DdosProtectionPlan', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ddosProtectionPlans/{ddosProtectionPlanName}'} # type: ignore
def _create_or_update_initial(
self,
resource_group_name, # type: str
ddos_protection_plan_name, # type: str
parameters, # type: "_models.DdosProtectionPlan"
**kwargs # type: Any
):
# type: (...) -> "_models.DdosProtectionPlan"
cls = kwargs.pop('cls', None) # type: ClsType["_models.DdosProtectionPlan"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-07-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'ddosProtectionPlanName': self._serialize.url("ddos_protection_plan_name", ddos_protection_plan_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'DdosProtectionPlan')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('DdosProtectionPlan', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('DdosProtectionPlan', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ddosProtectionPlans/{ddosProtectionPlanName}'} # type: ignore
def begin_create_or_update(
self,
resource_group_name, # type: str
ddos_protection_plan_name, # type: str
parameters, # type: "_models.DdosProtectionPlan"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.DdosProtectionPlan"]
"""Creates or updates a DDoS protection plan.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param ddos_protection_plan_name: The name of the DDoS protection plan.
:type ddos_protection_plan_name: str
:param parameters: Parameters supplied to the create or update operation.
:type parameters: ~azure.mgmt.network.v2019_07_01.models.DdosProtectionPlan
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either DdosProtectionPlan or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2019_07_01.models.DdosProtectionPlan]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.DdosProtectionPlan"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
ddos_protection_plan_name=ddos_protection_plan_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('DdosProtectionPlan', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'ddosProtectionPlanName': self._serialize.url("ddos_protection_plan_name", ddos_protection_plan_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ddosProtectionPlans/{ddosProtectionPlanName}'} # type: ignore
def _update_tags_initial(
self,
resource_group_name, # type: str
ddos_protection_plan_name, # type: str
parameters, # type: "_models.TagsObject"
**kwargs # type: Any
):
# type: (...) -> "_models.DdosProtectionPlan"
cls = kwargs.pop('cls', None) # type: ClsType["_models.DdosProtectionPlan"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-07-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._update_tags_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'ddosProtectionPlanName': self._serialize.url("ddos_protection_plan_name", ddos_protection_plan_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'TagsObject')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('DdosProtectionPlan', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_update_tags_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ddosProtectionPlans/{ddosProtectionPlanName}'} # type: ignore
def begin_update_tags(
self,
resource_group_name, # type: str
ddos_protection_plan_name, # type: str
parameters, # type: "_models.TagsObject"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.DdosProtectionPlan"]
"""Update a DDoS protection plan tags.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param ddos_protection_plan_name: The name of the DDoS protection plan.
:type ddos_protection_plan_name: str
:param parameters: Parameters supplied to the update DDoS protection plan resource tags.
:type parameters: ~azure.mgmt.network.v2019_07_01.models.TagsObject
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either DdosProtectionPlan or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2019_07_01.models.DdosProtectionPlan]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.DdosProtectionPlan"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._update_tags_initial(
resource_group_name=resource_group_name,
ddos_protection_plan_name=ddos_protection_plan_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('DdosProtectionPlan', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'ddosProtectionPlanName': self._serialize.url("ddos_protection_plan_name", ddos_protection_plan_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ddosProtectionPlans/{ddosProtectionPlanName}'} # type: ignore
def list(
self,
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.DdosProtectionPlanListResult"]
"""Gets all DDoS protection plans in a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either DdosProtectionPlanListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2019_07_01.models.DdosProtectionPlanListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.DdosProtectionPlanListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-07-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('DdosProtectionPlanListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/ddosProtectionPlans'} # type: ignore
def
|
(
self,
resource_group_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.DdosProtectionPlanListResult"]
"""Gets all the DDoS protection plans in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either DdosProtectionPlanListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2019_07_01.models.DdosProtectionPlanListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.DdosProtectionPlanListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-07-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_by_resource_group.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('DdosProtectionPlanListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ddosProtectionPlans'} # type: ignore
|
list_by_resource_group
|
maf_covered_regions.py
|
#!/usr/bin/env python
"""
Read a maf file and print the regions covered to a set of bed files (one for
each sequence source referenced in the maf). Only blocks with a positive
percent identity are written out.
TODO: Can this be generalized to be made more useful?
|
"""
from __future__ import division, print_function
import sys
import bx.align.maf
import psyco_full
def block_pid( comp1, comp2 ):
match = 0
total = 0
t1 = comp1.text.lower()
t2 = comp2.text.lower()
for i in range( 0, len(t1) ):
a, b = t1[i], t2[i]
if a == '-' or b == '-':
continue
elif a == b:
match += 1
total += 1
if total == 0: return None
return ( match / total )
def main():
out_prefix = sys.argv[1]
print(out_prefix)
out_files = dict()
for block in bx.align.maf.Reader( sys.stdin ):
ref_comp = block.components[0]
ref_chrom = ref_comp.src.split('.')[1]
for comp in block.components[1:]:
comp_species, comp_chrom = comp.src.split('.')[:2]
if comp_species not in out_files:
f = open( "%s%s.bed" % ( out_prefix, comp_species ), "w" )
out_files[comp_species] = f
pid = block_pid( ref_comp, comp )
if pid:
out_files[comp_species].write( "%s\t%d\t%d\t%s:%d-%d,%s\t%f\n" %
( ref_chrom, ref_comp.forward_strand_start, ref_comp.forward_strand_end, \
comp_chrom, comp.start, comp.end, comp.strand, pid ) )
for f in out_files.values():
f.close()
if __name__ == "__main__":
main()
|
usage: %prog bed_outfile_prefix < maf
|
models.py
|
import json
import csv
import pickle
class ArticleFromJson: # pragma: no cover
"""Data model for one article"""
def __init__(
self,
publisher: str,
title: str,
description: str,
url: str,
date_published,
content: str,
author: str = 'Not Found',
):
self.publisher = publisher
self.title = title
self.description = description
self.url = url
self.date_published = date_published
self.content = content
self.author = author
def __eq__(self, other):
if not isinstance(other, type(self)):
return NotImplemented
return self.__dict__ == other.__dict__
class Articles: # pragma: no cover
"""Model to contain a list of article data.
Also has functions to serialize that data"""
def __init__(self, articles: list):
|
def __add__(self, other: 'Articles'):
articles = self.articles + other.articles
return Articles(articles)
def __radd__(self, other: 'Articles'):
articles = self.articles + other.articles
return Articles(articles)
def to_csv(self, csv_name: str):
"""Create a .csv file from the articles data to better visualize"""
with open(csv_name, "w") as f:
writer = csv.DictWriter(f, vars(self.articles[0]).keys())
writer.writeheader()
for article in self.articles:
writer.writerow(vars(article))
f.close()
def to_json(self):
"""Serializes the article objects to json"""
article_list = [vars(article) for article in self.articles]
return json.dumps({"articles": article_list})
def to_pickle(self, pickle_name: str):
"""Serialization of article objects to byte stream"""
with open(pickle_name, "wb") as f:
pickle.dump(self.articles, f)
f.close()
def __eq__(self, other):
if not isinstance(other, type(self)):
return NotImplemented
for index, article in enumerate(self.articles):
if article != other.articles[index]:
return False
return True
|
self.articles = articles
|
operators.py
|
#------------------------------------------------------------------------------------------#
# This file is part of Pyccel which is released under MIT License. See the LICENSE file or #
# go to https://github.com/pyccel/pyccel/blob/master/LICENSE for full license details. #
#------------------------------------------------------------------------------------------#
# TODO [EB 12.03.21]: Remove pylint command with PR #797
# pylint: disable=W0201
"""
Module handling all python builtin operators
These operators all have a precision as detailed here:
https://docs.python.org/3/reference/expressions.html#operator-precedence
They also have specific rules to determine the dtype, precision, rank, shape
"""
from ..errors.errors import Errors, PyccelSemanticError
from .basic import PyccelAstNode
from .datatypes import (NativeBool, NativeInteger, NativeReal,
NativeComplex, NativeString, default_precision,
NativeNumeric)
from .literals import Literal, LiteralInteger, LiteralFloat, LiteralComplex, Nil
from .literals import convert_to_literal
errors = Errors()
__all__ = (
'PyccelOperator',
'PyccelPow',
'PyccelAdd',
'PyccelMinus',
'PyccelMul',
'PyccelDiv',
'PyccelMod',
'PyccelFloorDiv',
'PyccelEq',
'PyccelNe',
'PyccelLt',
'PyccelLe',
'PyccelGt',
'PyccelGe',
'PyccelAnd',
'PyccelOr',
'PyccelNot',
'PyccelAssociativeParenthesis',
'PyccelUnary',
'PyccelUnarySub',
'Relational',
'PyccelIs',
'PyccelIsNot',
'IfTernaryOperator'
)
#==============================================================================
def broadcast(shape_1, shape_2):
""" This function broadcast two shapes using numpy broadcasting rules """
from pyccel.ast.sympy_helper import pyccel_to_sympy
a = len(shape_1)
b = len(shape_2)
if a>b:
new_shape_2 = (LiteralInteger(1),)*(a-b) + tuple(shape_2)
new_shape_1 = shape_1
elif b>a:
new_shape_1 = (LiteralInteger(1),)*(b-a) + tuple(shape_1)
new_shape_2 = shape_2
else:
new_shape_2 = shape_2
new_shape_1 = shape_1
new_shape = []
for e1,e2 in zip(new_shape_1, new_shape_2):
used_names = set()
symbol_map = {}
sy_e1 = pyccel_to_sympy(e1, symbol_map, used_names)
sy_e2 = pyccel_to_sympy(e2, symbol_map, used_names)
if sy_e1 == sy_e2:
new_shape.append(e1)
elif sy_e1 == 1:
new_shape.append(e2)
elif sy_e2 == 1:
new_shape.append(e1)
elif sy_e1.is_constant() and not sy_e2.is_constant():
new_shape.append(e1)
elif sy_e2.is_constant() and not sy_e1.is_constant():
new_shape.append(e2)
elif not sy_e2.is_constant() and not sy_e1.is_constant()\
and not (sy_e1 - sy_e2).is_constant():
new_shape.append(e1)
else:
shape1_code = '({})'.format(' '.join([str(s)+',' for s in shape_1]))
shape2_code = '({})'.format(' '.join([str(s)+',' for s in shape_2]))
msg = 'operands could not be broadcast together with shapes {} {}'
msg = msg.format(shape1_code, shape2_code)
raise PyccelSemanticError(msg)
return tuple(new_shape)
#==============================================================================
class PyccelOperator(PyccelAstNode):
"""
Abstract superclass for all builtin operators.
The __init__ function is common
but the functions called by __init__ are specialised
Parameters
----------
args: tuple
The arguments passed to the operator
"""
__slots__ = ('_args', )
_attribute_nodes = ('_args',)
def __init__(self, *args):
self._args = tuple(self._handle_precedence(args))
if self.stage == 'syntactic':
super().__init__()
return
self._set_dtype()
self._set_shape_rank()
# rank is None for lambda functions
self._set_order()
super().__init__()
def _set_dtype(self):
self._dtype, self._precision = self._calculate_dtype(*self._args) # pylint: disable=no-member
def _set_shape_rank(self):
self._shape, self._rank = self._calculate_shape_rank(*self._args) # pylint: disable=no-member
@property
def precedence(self):
""" The precedence of the operator as defined here:
https://docs.python.org/3/reference/expressions.html#operator-precedence
"""
return self._precedence
def _handle_precedence(self, args):
"""
Insert parentheses where necessary by examining the precedence of the operator
e.g:
PyccelMul(a,PyccelAdd(b,c))
means:
a*(b+c)
so this input will give:
PyccelMul(a, PyccelAssociativeParenthesis(PyccelAdd(b,c)))
Parentheses are also added were they are required for clarity
Parameters
----------
args: tuple
The arguments passed to the operator
Results
-------
args: tuple
The arguments with the parentheses inserted
"""
precedence = [getattr(a, 'precedence', 17) for a in args]
if min(precedence) <= self._precedence:
new_args = []
for i, (a,p) in enumerate(zip(args, precedence)):
if (p < self._precedence or (p == self._precedence and i != 0)):
new_args.append(PyccelAssociativeParenthesis(a))
else:
new_args.append(a)
args = tuple(new_args)
return args
def __str__(self):
return repr(self)
def _set_order(self):
""" Sets the shape and rank
This is chosen to match the arguments if they are in agreement.
Otherwise it defaults to 'C'
"""
if self._rank is not None and self._rank > 1:
if all(a.order == self._args[0].order for a in self._args):
self._order = self._args[0].order
else:
self._order = 'C'
else:
self._order = None
@property
def args(self):
""" Arguments of the operator
"""
return self._args
#==============================================================================
class PyccelUnaryOperator(PyccelOperator):
""" Abstract superclass representing a python
operator with only one argument
Parameters
----------
arg: PyccelAstNode
The argument passed to the operator
"""
__slots__ = ('_dtype', '_precision','_shape','_rank','_order')
def __init__(self, arg):
super().__init__(arg)
@staticmethod
def _calculate_dtype(*args):
""" Sets the dtype and precision
They are chosen to match the argument
"""
a = args[0]
dtype = a.dtype
precision = a.precision
return dtype, precision
@staticmethod
def _calculate_shape_rank(*args):
""" Sets the shape and rank
They are chosen to match the argument
"""
a = args[0]
rank = a.rank
shape = a.shape
return shape, rank
#==============================================================================
class PyccelUnary(PyccelUnaryOperator):
"""
Class representing a call to the python positive operator.
I.e:
+a
is equivalent to:
PyccelUnary(a)
Parameters
----------
arg: PyccelAstNode
The argument passed to the operator
"""
__slots__ = ()
_precedence = 14
def _handle_precedence(self, args):
args = PyccelUnaryOperator._handle_precedence(self, args)
args = tuple(PyccelAssociativeParenthesis(a) if isinstance(a, PyccelUnary) else a for a in args)
return args
def __repr__(self):
return '+{}'.format(repr(self.args[0]))
#==============================================================================
class PyccelUnarySub(PyccelUnary):
"""
Class representing a call to the python negative operator.
I.e:
-a
is equivalent to:
PyccelUnarySub(a)
Parameters
----------
arg: PyccelAstNode
The argument passed to the operator
"""
__slots__ = ()
def __repr__(self):
return '-{}'.format(repr(self.args[0]))
#==============================================================================
class PyccelNot(PyccelUnaryOperator):
"""
Class representing a call to the python not operator.
I.e:
not a
is equivalent to:
PyccelNot(a)
Parameters
----------
arg: PyccelAstNode
The argument passed to the operator
"""
__slots__ = ()
_precedence = 6
@staticmethod
def _calculate_dtype(*args):
""" Sets the dtype and precision
They are chosen to match the argument unless the class has
a _dtype or _precision member
"""
dtype = NativeBool()
precision = default_precision['bool']
return dtype, precision
@staticmethod
def _calculate_shape_rank(*args):
""" Sets the shape and rank
They are chosen to match the argument unless the class has
a _shape or _rank member
"""
rank = 0
shape = ()
return shape, rank
def __repr__(self):
return 'not {}'.format(repr(self.args[0]))
#==============================================================================
class PyccelAssociativeParenthesis(PyccelUnaryOperator):
"""
Class representing parentheses
Parameters
----------
arg: PyccelAstNode
The argument in the PyccelAssociativeParenthesis
"""
__slots__ = () # ok
_precedence = 18
def _handle_precedence(self, args):
return args
def __repr__(self):
return '({})'.format(repr(self.args[0]))
#==============================================================================
class PyccelBinaryOperator(PyccelOperator):
""" Abstract superclass representing a python
operator with two arguments
Parameters
----------
arg1: PyccelAstNode
The first argument passed to the operator
arg2: PyccelAstNode
The second argument passed to the operator
"""
__slots__ = ('_dtype','_precision','_shape','_rank','_order')
def __init__(self, arg1, arg2, simplify = False):
super().__init__(arg1, arg2)
@classmethod
def _calculate_dtype(cls, *args):
""" Sets the dtype and precision
If one argument is a string then all arguments must be strings
If the arguments are numeric then the dtype and precision
match the broadest type and the largest precision
e.g.
1 + 2j -> PyccelAdd(LiteralInteger, LiteralComplex) -> complex
"""
integers = [a for a in args if a.dtype in (NativeInteger(),NativeBool())]
reals = [a for a in args if a.dtype is NativeReal()]
complexes = [a for a in args if a.dtype is NativeComplex()]
strs = [a for a in args if a.dtype is NativeString()]
if strs:
return cls._handle_str_type(strs)
assert len(integers + reals + complexes) == 0
elif complexes:
return cls._handle_complex_type(complexes)
elif reals:
return cls._handle_real_type(reals)
elif integers:
return cls._handle_integer_type(integers)
else:
raise TypeError('cannot determine the type of {}'.format(args))
@staticmethod
def _handle_str_type(strs):
"""
Set dtype and precision when both arguments are strings
"""
raise TypeError("unsupported operand type(s) for /: 'str' and 'str'")
@staticmethod
def _handle_complex_type(complexes):
"""
Set dtype and precision when the result is complex
"""
dtype = NativeComplex()
precision = max(a.precision for a in complexes)
return dtype, precision
@staticmethod
def _handle_real_type(reals):
"""
Set dtype and precision when the result is real
"""
dtype = NativeReal()
precision = max(a.precision for a in reals)
return dtype, precision
@staticmethod
def _handle_integer_type(integers):
"""
Set dtype and precision when the result is integer
"""
dtype = NativeInteger()
precision = max(a.precision for a in integers)
return dtype, precision
@staticmethod
def _calculate_shape_rank(*args):
""" Sets the shape and rank
Strings must be scalars.
For numeric types the rank and shape is determined according
to numpy broadcasting rules where possible
"""
strs = [a for a in args if a.dtype is NativeString()]
if strs:
other = [a for a in args if a.dtype in (NativeInteger(), NativeBool(), NativeReal(), NativeComplex())]
assert len(other) == 0
rank = 0
shape = ()
else:
ranks = [a.rank for a in args]
shapes = [a.shape for a in args]
if None in ranks:
rank = None
shape = None
elif all(sh is not None for tup in shapes for sh in tup):
s = broadcast(args[0].shape, args[1].shape)
shape = s
rank = len(s)
else:
rank = max(a.rank for a in args)
shape = [None]*rank
return shape, rank
#==============================================================================
class PyccelArithmeticOperator(PyccelBinaryOperator):
""" Abstract superclass representing a python
arithmetic operator
This class is necessary to handle specific precedence
rules for arithmetic operators
I.e. to handle the error:
Extension: Unary operator following arithmetic operator (use parentheses)
Parameters
----------
arg1: PyccelAstNode
The first argument passed to the operator
arg2: PyccelAstNode
The second argument passed to the operator
"""
__slots__ = ()
def _handle_precedence(self, args):
args = PyccelBinaryOperator._handle_precedence(self, args)
args = tuple(PyccelAssociativeParenthesis(a) if isinstance(a, PyccelUnary) else a for a in args)
return args
#==============================================================================
class PyccelPow(PyccelArithmeticOperator):
"""
Class representing a call to the python exponent operator.
I.e:
a ** b
is equivalent to:
PyccelPow(a, b)
Parameters
----------
arg1: PyccelAstNode
The first argument passed to the operator
arg2: PyccelAstNode
The second argument passed to the operator
"""
__slots__ = ()
_precedence = 15
def __repr__(self):
return '{} ** {}'.format(self.args[0], self.args[1])
#==============================================================================
class PyccelAdd(PyccelArithmeticOperator):
"""
Class representing a call to the python addition operator.
I.e:
a + b
is equivalent to:
PyccelAdd(a, b)
Parameters
----------
arg1: PyccelAstNode
The first argument passed to the operator
arg2: PyccelAstNode
The second argument passed to the operator
"""
__slots__ = ()
_precedence = 12
def __new__(cls, arg1, arg2, simplify = False):
if simplify:
if isinstance(arg2, PyccelUnarySub):
return PyccelMinus(arg1, arg2.args[0], simplify = True)
dtype, precision = cls._calculate_dtype(arg1, arg2)
if isinstance(arg1, Literal) and isinstance(arg2, Literal):
return convert_to_literal(arg1.python_value + arg2.python_value,
dtype, precision)
if dtype == arg2.dtype and precision == arg2.precision and \
isinstance(arg1, Literal) and arg1.python_value == 0:
return arg2
if dtype == arg1.dtype and precision == arg1.precision and \
isinstance(arg2, Literal) and arg2.python_value == 0:
return arg1
if isinstance(arg1, (LiteralInteger, LiteralFloat)) and \
isinstance(arg2, LiteralComplex) and \
arg2.real == LiteralFloat(0):
return LiteralComplex(arg1, arg2.imag)
elif isinstance(arg2, (LiteralInteger, LiteralFloat)) and \
isinstance(arg1, LiteralComplex) and \
arg1.real == LiteralFloat(0):
return LiteralComplex(arg2, arg1.imag)
else:
return super().__new__(cls)
@staticmethod
def _handle_str_type(strs):
dtype = NativeString()
precision = None
return dtype, precision
def __repr__(self):
return '{} + {}'.format(self.args[0], self.args[1])
#==============================================================================
class PyccelMul(PyccelArithmeticOperator):
"""
Class representing a call to the python multiplication operator.
I.e:
a * b
is equivalent to:
PyccelMul(a, b)
Parameters
----------
arg1: PyccelAstNode
The first argument passed to the operator
arg2: PyccelAstNode
The second argument passed to the operator
"""
__slots__ = ()
_precedence = 13
def __new__(cls, arg1, arg2, simplify = False):
if simplify:
if (arg1 == 1):
return arg2
if (arg2 == 1):
return arg1
if (arg1 == 0 or arg2 == 0):
dtype, precision = cls._calculate_dtype(arg1, arg2)
return convert_to_literal(0, dtype, precision)
if (isinstance(arg1, PyccelUnarySub) and arg1.args[0] == 1):
return PyccelUnarySub(arg2)
if (isinstance(arg2, PyccelUnarySub) and arg2.args[0] == 1):
return PyccelUnarySub(arg1)
if isinstance(arg1, Literal) and isinstance(arg2, Literal):
dtype, precision = cls._calculate_dtype(arg1, arg2)
return convert_to_literal(arg1.python_value * arg2.python_value,
dtype, precision)
return super().__new__(cls)
def __repr__(self):
|
#==============================================================================
class PyccelMinus(PyccelArithmeticOperator):
"""
Class representing a call to the python subtraction operator.
I.e:
a - b
is equivalent to:
PyccelMinus(a, b)
Parameters
----------
arg1: PyccelAstNode
The first argument passed to the operator
arg2: PyccelAstNode
The second argument passed to the operator
"""
__slots__ = ()
_precedence = 12
def __new__(cls, arg1, arg2, simplify = False):
if simplify:
if isinstance(arg2, PyccelUnarySub):
return PyccelAdd(arg1, arg2.args[0], simplify = True)
elif isinstance(arg1, Literal) and isinstance(arg2, Literal):
dtype, precision = cls._calculate_dtype(arg1, arg2)
return convert_to_literal(arg1.python_value - arg2.python_value,
dtype, precision)
if isinstance(arg1, LiteralFloat) and \
isinstance(arg2, LiteralComplex) and \
arg2.real == LiteralFloat(0):
return LiteralComplex(arg1, -arg2.imag.python_value)
elif isinstance(arg2, LiteralFloat) and \
isinstance(arg1, LiteralComplex) and \
arg1.real == LiteralFloat(0):
return LiteralComplex(-arg2.python_value, arg1.imag)
else:
return super().__new__(cls)
def __repr__(self):
return '{} - {}'.format(repr(self.args[0]), repr(self.args[1]))
#==============================================================================
class PyccelDiv(PyccelArithmeticOperator):
"""
Class representing a call to the python division operator.
I.e:
a / b
is equivalent to:
PyccelDiv(a, b)
Parameters
----------
arg1: PyccelAstNode
The first argument passed to the operator
arg2: PyccelAstNode
The second argument passed to the operator
"""
__slots__ = ()
_precedence = 13
def __new__(cls, arg1, arg2, simplify=False):
if simplify:
if (arg2 == 1):
return arg1
return super().__new__(cls)
@staticmethod
def _handle_integer_type(integers):
dtype = NativeReal()
precision = default_precision['real']
return dtype, precision
def __repr__(self):
return '{} + {}'.format(self.args[0], self.args[1])
def __repr__(self):
return '{} / {}'.format(self.args[0], self.args[1])
#==============================================================================
class PyccelMod(PyccelArithmeticOperator):
"""
Class representing a call to the python modulo operator.
I.e:
a % b
is equivalent to:
PyccelMod(a, b)
Parameters
----------
arg1: PyccelAstNode
The first argument passed to the operator
arg2: PyccelAstNode
The second argument passed to the operator
"""
__slots__ = ()
_precedence = 13
def __repr__(self):
return '{} % {}'.format(self.args[0], self.args[1])
#==============================================================================
class PyccelFloorDiv(PyccelArithmeticOperator):
"""
Class representing a call to the python integer division operator.
I.e:
a // b
is equivalent to:
PyccelFloorDiv(a, b)
Parameters
----------
arg1: PyccelAstNode
The first argument passed to the operator
arg2: PyccelAstNode
The second argument passed to the operator
"""
__slots__ = ()
_precedence = 13
def __repr__(self):
return '{} // {}'.format(self.args[0], self.args[1])
#==============================================================================
class PyccelComparisonOperator(PyccelBinaryOperator):
""" Abstract superclass representing a python
comparison operator with two arguments
Parameters
----------
arg1: PyccelAstNode
The first argument passed to the operator
arg2: PyccelAstNode
The second argument passed to the operator
"""
__slots__ = ()
_precedence = 7
@staticmethod
def _calculate_dtype(*args):
dtype = NativeBool()
precision = default_precision['bool']
return dtype, precision
#==============================================================================
class PyccelEq(PyccelComparisonOperator):
"""
Class representing a call to the python equality operator.
I.e:
a == b
is equivalent to:
PyccelEq(a, b)
Parameters
----------
arg1: PyccelAstNode
The first argument passed to the operator
arg2: PyccelAstNode
The second argument passed to the operator
"""
__slots__ = ()
def __repr__(self):
return '{} == {}'.format(self.args[0], self.args[1])
class PyccelNe(PyccelComparisonOperator):
"""
Class representing a call to the python inequality operator.
I.e:
a != b
is equivalent to:
PyccelEq(a, b)
Parameters
----------
arg1: PyccelAstNode
The first argument passed to the operator
arg2: PyccelAstNode
The second argument passed to the operator
"""
__slots__ = ()
def __repr__(self):
return '{} != {}'.format(self.args[0], self.args[1])
class PyccelLt(PyccelComparisonOperator):
"""
Class representing a call to the python less than operator.
I.e:
a < b
is equivalent to:
PyccelEq(a, b)
Parameters
----------
arg1: PyccelAstNode
The first argument passed to the operator
arg2: PyccelAstNode
The second argument passed to the operator
"""
__slots__ = ()
def __repr__(self):
return '{} < {}'.format(self.args[0], self.args[1])
class PyccelLe(PyccelComparisonOperator):
"""
Class representing a call to the python less or equal operator.
I.e:
a <= b
is equivalent to:
PyccelEq(a, b)
Parameters
----------
arg1: PyccelAstNode
The first argument passed to the operator
arg2: PyccelAstNode
The second argument passed to the operator
"""
__slots__ = ()
def __repr__(self):
return '{} <= {}'.format(self.args[0], self.args[1])
class PyccelGt(PyccelComparisonOperator):
"""
Class representing a call to the python greater than operator.
I.e:
a > b
is equivalent to:
PyccelEq(a, b)
Parameters
----------
arg1: PyccelAstNode
The first argument passed to the operator
arg2: PyccelAstNode
The second argument passed to the operator
"""
__slots__ = ()
def __repr__(self):
return '{} > {}'.format(self.args[0], self.args[1])
class PyccelGe(PyccelComparisonOperator):
"""
Class representing a call to the python greater or equal operator.
I.e:
a >= b
is equivalent to:
PyccelEq(a, b)
Parameters
----------
arg1: PyccelAstNode
The first argument passed to the operator
arg2: PyccelAstNode
The second argument passed to the operator
"""
__slots__ = ()
def __repr__(self):
return '{} >= {}'.format(self.args[0], self.args[1])
#==============================================================================
class PyccelBooleanOperator(PyccelOperator):
""" Abstract superclass representing a python
boolean operator with two arguments
Parameters
----------
arg1: PyccelAstNode
The first argument passed to the operator
arg2: PyccelAstNode
The second argument passed to the operator
"""
dtype = NativeBool()
precision = default_precision['bool']
rank = 0
shape = ()
order = None
__slots__ = ()
def _set_order(self):
pass
def _set_dtype(self):
pass
def _set_shape_rank(self):
pass
#==============================================================================
class PyccelAnd(PyccelBooleanOperator):
"""
Class representing a call to the python AND operator.
I.e:
a and b
is equivalent to:
PyccelAnd(a, b)
Parameters
----------
arg1: PyccelAstNode
The first argument passed to the operator
arg2: PyccelAstNode
The second argument passed to the operator
"""
__slots__ = ()
_precedence = 5
def _handle_precedence(self, args):
args = PyccelBooleanOperator._handle_precedence(self, args)
args = tuple(PyccelAssociativeParenthesis(a) if isinstance(a, PyccelOr) else a for a in args)
return args
def __repr__(self):
return '{} and {}'.format(self.args[0], self.args[1])
#==============================================================================
class PyccelOr(PyccelBooleanOperator):
"""
Class representing a call to the python OR operator.
I.e:
a or b
is equivalent to:
PyccelOr(a, b)
Parameters
----------
arg1: PyccelAstNode
The first argument passed to the operator
arg2: PyccelAstNode
The second argument passed to the operator
"""
__slots__ = ()
_precedence = 4
def _handle_precedence(self, args):
args = PyccelBooleanOperator._handle_precedence(self, args)
args = tuple(PyccelAssociativeParenthesis(a) if isinstance(a, PyccelAnd) else a for a in args)
return args
def __repr__(self):
return '{} or {}'.format(self.args[0], self.args[1])
#==============================================================================
class PyccelIs(PyccelBooleanOperator):
"""Represents a is expression in the code.
Examples
--------
>>> from pyccel.ast.operators import PyccelIs
>>> from pyccel.ast.literals import Nil
>>> from pyccel.ast.internals import PyccelSymbol
>>> x = PyccelSymbol('x')
>>> PyccelIs(x, Nil())
PyccelIs(x, None)
"""
__slots__ = ()
_precedence = 7
def __init__(self, arg1, arg2):
super().__init__(arg1, arg2)
@property
def lhs(self):
""" First operator argument"""
return self._args[0]
@property
def rhs(self):
""" First operator argument"""
return self._args[1]
def __repr__(self):
return '{} is {}'.format(self.args[0], self.args[1])
#==============================================================================
class PyccelIsNot(PyccelIs):
"""Represents a is expression in the code.
Examples
--------
>>> from pyccel.ast.operators import PyccelIsNot
>>> from pyccel.ast.literals import Nil
>>> from pyccel.ast.internals import PyccelSymbol
>>> x = PyccelSymbol('x')
>>> PyccelIsNot(x, Nil())
PyccelIsNot(x, None)
"""
__slots__ = ()
def __repr__(self):
return '{} is not {}'.format(self.args[0], self.args[1])
#==============================================================================
class IfTernaryOperator(PyccelOperator):
"""Represent a ternary conditional operator in the code, of the form (a if cond else b)
Parameters
----------
args :
args : type list
format : condition , value_if_true, value_if_false
Examples
--------
>>> from pyccel.ast.internals import PyccelSymbol
>>> from pyccel.ast.core import Assign
>>> from pyccel.ast.operators import IfTernaryOperator
>>> n = PyccelSymbol('n')
>>> x = 5 if n > 1 else 2
>>> IfTernaryOperator(PyccelGt(n > 1), 5, 2)
IfTernaryOperator(PyccelGt(n > 1), 5, 2)
"""
__slots__ = ('_dtype','_precision','_shape','_rank','_order')
_precedence = 3
def __init__(self, cond, value_true, value_false):
super().__init__(cond, value_true, value_false)
if self.stage == 'syntactic':
return
if isinstance(value_true , Nil) or isinstance(value_false, Nil):
errors.report('None is not implemented for Ternary Operator', severity='fatal')
if isinstance(value_true , NativeString) or isinstance(value_false, NativeString):
errors.report('String is not implemented for Ternary Operator', severity='fatal')
if value_true.dtype != value_false.dtype:
if value_true.dtype not in NativeNumeric or value_false.dtype not in NativeNumeric:
errors.report('The types are incompatible in IfTernaryOperator', severity='fatal')
if value_false.rank != value_true.rank :
errors.report('Ternary Operator results should have the same rank', severity='fatal')
if value_false.shape != value_true.shape :
errors.report('Ternary Operator results should have the same shape', severity='fatal')
@staticmethod
def _calculate_dtype(cond, value_true, value_false):
"""
Sets the dtype and precision for IfTernaryOperator
"""
if value_true.dtype in NativeNumeric and value_false.dtype in NativeNumeric:
dtype = max([value_true.dtype, value_false.dtype], key = NativeNumeric.index)
else:
dtype = value_true.dtype
precision = max([value_true.precision, value_false.precision])
return dtype, precision
@staticmethod
def _calculate_shape_rank(cond, value_true, value_false):
"""
Sets the shape and rank and the order for IfTernaryOperator
"""
shape = value_true.shape
rank = value_true.rank
if rank is not None and rank > 1:
if value_false.order != value_true.order :
errors.report('Ternary Operator results should have the same order', severity='fatal')
return shape, rank
@property
def cond(self):
"""
The condition property for IfTernaryOperator class
"""
return self._args[0]
@property
def value_true(self):
"""
The value_if_cond_true property for IfTernaryOperator class
"""
return self._args[1]
@property
def value_false(self):
"""
The value_if_cond_false property for IfTernaryOperator class
"""
return self._args[2]
#==============================================================================
Relational = (PyccelEq, PyccelNe, PyccelLt, PyccelLe, PyccelGt, PyccelGe, PyccelAnd, PyccelOr, PyccelNot, PyccelIs, PyccelIsNot)
|
return '{} * {}'.format(self.args[0], self.args[1])
|
mainFunctions.py
|
import numpy as np
import torch
import torch.nn as nn
from torch.nn import functional as F
from tqdm import tqdm
class _BaseWrapper():
def __init__(self, model):
super().__init__()
self.model = model
self.handlers = []
def forward(self, images):
self.image_shape = images.shape[2:]
print(self.image_shape)
self.logits = self.model(images)
self.probs = F.softmax(self.logits, dim=1)
return self.probs.sort(dim=1, descending=True)
def backward(self, ids):
one_hot = F.one_hot(ids, self.logits.shape[-1])
one_hot = one_hot.squeeze()
self.model.zero_grad()
self.logits.backward(gradient=one_hot, retain_graph=True)
# gradient는 해당 index에 대해서만 미분을 통한 backpropagation을 하겠다는 의미이다.
# 즉, 내가 확인하고 싶은 class에 대해서 featuremap이 얼마나 영향을 미쳤는지 확인할 수 있다.
def generate(self):
raise NotImplementedError
class GradCAM(_BaseWrapper):
def __init__(self, model, layers=None):
super().__init__(model)
self.feature_map = {}
self.grad_map = {}
self.layers = layers
def save_fmaps(key):
def forward_hook(module, input, output):
self.feature_map[key]=output.detach()
return forward_hook
def save_grads(key):
def backward_hook(modeul, grad_in, grad_out):
self.grad_map[key] = grad_out[0].detach()
return backward_hook
for name, module in self.model.named_modules():
if self.layers is None or name in self.layers:
self.handlers.append(module.register_forward_hook(save_fmaps(name)))
self.handlers.append(module.register_backward_hook(save_grads(name)))
def findLayers(self, layers, target_layer):
if target_layer in layers.keys():
return layers[target_layer]
else:
raise ValueError(f"{target_layer} not exists")
def generate(self, target_layer):
feature_maps = self.findLayers(self.feature_map, target_layer)
grad_maps = self.findLayers(self.grad_map, target_layer)
weights = F.adaptive_avg_pool2d(grad_maps, 1)
grad_cam = torch.mul(feature_maps, weights).sum(dim=1, keepdim=True)
grad_cam = F.relu(grad_cam)
grad_cam = F.interpolate(grad_cam, self.image_shape, mode="bilinear", align_corners=False)
B, C, H, W = grad_cam.shape
# C는 1인듯?
|
grad_cam /= grad_cam.max(dim=1, keepdim=True)[0]
grad_cam = grad_cam.view(B, C, H, W)
return grad_cam
|
grad_cam = grad_cam.view(B, -1)
grad_cam -= grad_cam.min(dim=1, keepdim=True)[0]
# 양수 만들어주려고 하는듯
|
network.py
|
# -*- coding: utf-8 -*-
import netifaces
from i3pystatus import IntervalModule
from i3pystatus.core.color import ColorRangeModule
from i3pystatus.core.util import make_graph, round_dict, make_bar
|
while (integer):
integer &= integer - 1
bits += 1
return bits
def v6_to_int(v6):
return int(v6.replace(":", ""), 16)
def prefix6(mask):
return count_bits(v6_to_int(mask))
def cidr6(addr, mask):
return "{addr}/{bits}".format(addr=addr, bits=prefix6(mask))
def v4_to_int(v4):
sum = 0
mul = 1
for part in reversed(v4.split(".")):
sum += int(part) * mul
mul *= 2 ** 8
return sum
def prefix4(mask):
return count_bits(v4_to_int(mask))
def cidr4(addr, mask):
return "{addr}/{bits}".format(addr=addr, bits=prefix4(mask))
def get_bonded_slaves():
try:
with open("/sys/class/net/bonding_masters") as f:
masters = f.read().split()
except FileNotFoundError:
return {}
slaves = {}
for master in masters:
with open("/sys/class/net/{}/bonding/slaves".format(master)) as f:
for slave in f.read().split():
slaves[slave] = master
return slaves
def sysfs_interface_up(interface, unknown_up=False):
try:
with open("/sys/class/net/{}/operstate".format(interface)) as f:
status = f.read().strip()
except FileNotFoundError:
# Interface doesn't exist
return False
return status == "up" or unknown_up and status == "unknown"
class NetworkInfo():
"""
Retrieve network information.
"""
def __init__(self, interface, ignore_interfaces, detached_down, unknown_up, get_wifi_info=False):
if interface not in netifaces.interfaces() and not detached_down:
raise RuntimeError(
"Unknown interface {iface}!".format(iface=interface))
self.ignore_interfaces = ignore_interfaces
self.detached_down = detached_down
self.unknown_up = unknown_up
self.get_wifi_info = get_wifi_info
def get_info(self, interface):
format_dict = dict(v4="", v4mask="", v4cidr="", v6="", v6mask="", v6cidr="")
iface_up = sysfs_interface_up(interface, self.unknown_up)
if not iface_up:
return format_dict
network_info = netifaces.ifaddresses(interface)
slaves = get_bonded_slaves()
try:
master = slaves[interface]
except KeyError:
pass
else:
if sysfs_interface_up(interface, self.unknown_up):
master_info = netifaces.ifaddresses(master)
for af in (netifaces.AF_INET, netifaces.AF_INET6):
try:
network_info[af] = master_info[af]
except KeyError:
pass
try:
mac = network_info[netifaces.AF_PACKET][0]["addr"]
except KeyError:
mac = "NONE"
format_dict['mac'] = mac
if iface_up:
format_dict.update(self.extract_network_info(network_info))
format_dict.update(self.extract_wireless_info(interface))
return format_dict
@staticmethod
def extract_network_info(network_info):
info = dict()
if netifaces.AF_INET in network_info:
v4 = network_info[netifaces.AF_INET][0]
info["v4"] = v4["addr"]
info["v4mask"] = v4["netmask"]
info["v4cidr"] = cidr4(v4["addr"], v4["netmask"])
if netifaces.AF_INET6 in network_info:
for v6 in network_info[netifaces.AF_INET6]:
info["v6"] = v6["addr"]
info["v6mask"] = v6["netmask"]
info["v6cidr"] = cidr6(v6["addr"], v6["netmask"])
if not v6["addr"].startswith("fe80::"): # prefer non link-local addresses
break
return info
def extract_wireless_info(self, interface):
info = dict(essid="", freq="", quality=0.0, quality_bar="")
# Just return empty values if we're not using any Wifi functionality
if not self.get_wifi_info:
return info
import basiciw
try:
iwi = basiciw.iwinfo(interface)
except Exception:
# Not a wireless interface
return info
info["essid"] = iwi["essid"]
info["freq"] = iwi["freq"]
quality = iwi["quality"]
if quality["quality_max"] > 0:
info["quality"] = quality["quality"] / quality["quality_max"]
else:
info["quality"] = quality["quality"]
info["quality"] *= 100
info["quality_bar"] = make_bar(info["quality"])
info["quality"] = round(info["quality"])
return info
class NetworkTraffic():
"""
Retrieve network traffic information
"""
pnic = None
pnic_before = None
def __init__(self, unknown_up, divisor, round_size):
self.unknown_up = unknown_up
self.divisor = divisor
self.round_size = round_size
def update_counters(self, interface):
import psutil
self.pnic_before = self.pnic
counters = psutil.net_io_counters(pernic=True)
self.pnic = counters[interface] if interface in counters else None
def clear_counters(self):
self.pnic_before = None
self.pnic = None
def get_bytes_sent(self):
return (self.pnic.bytes_sent - self.pnic_before.bytes_sent) / self.divisor
def get_bytes_received(self):
return (self.pnic.bytes_recv - self.pnic_before.bytes_recv) / self.divisor
def get_packets_sent(self):
return self.pnic.packets_sent - self.pnic_before.packets_sent
def get_packets_received(self):
return self.pnic.packets_recv - self.pnic_before.packets_recv
def get_usage(self, interface):
self.update_counters(interface)
usage = dict(bytes_sent=0, bytes_recv=0, packets_sent=0, packets_recv=0)
if not sysfs_interface_up(interface, self.unknown_up) or not self.pnic_before:
return usage
else:
usage["bytes_sent"] = self.get_bytes_sent()
usage["bytes_recv"] = self.get_bytes_received()
usage["packets_sent"] = self.get_packets_sent()
usage["packets_recv"] = self.get_packets_received()
round_dict(usage, self.round_size)
return usage
class Network(IntervalModule, ColorRangeModule):
"""
Displays network information for an interface.
Requires the PyPI packages `psutil`, `colour`, `netifaces` and `basiciw`
.. rubric:: Available formatters
Network Traffic Formatters:
* `{interface}` — the configured network interface
* `{kbs}` – Float representing kb\s
* `{network_graph}` – Unicode graph representing network usage
* `{bytes_sent}` — bytes sent per second (divided by divisor)
* `{bytes_recv}` — bytes received per second (divided by divisor)
* `{packets_sent}` — bytes sent per second (divided by divisor)
* `{packets_recv}` — bytes received per second (divided by divisor)
Network Information Formatters:
* `{interface}` — same as setting
* `{v4}` — IPv4 address
* `{v4mask}` — subnet mask
* `{v4cidr}` — IPv4 address in cidr notation (i.e. 192.168.2.204/24)
* `{v6}` — IPv6 address
* `{v6mask}` — subnet mask
* `{v6cidr}` — IPv6 address in cidr notation
* `{mac}` — MAC of interface
Wireless Information Formatters:
* `{essid}` — ESSID of currently connected wifi
* `{freq}` — Current frequency
* `{quality}` — Link quality in percent
* `{quality_bar}` —Bar graphically representing link quality
"""
settings = (
("format_up", "format string"),
("format_down", "format string"),
"color_up",
"color_down",
("interface", "Interface to watch, eg 'eth0'"),
("dynamic_color", "Set color dynamically based on network traffic. Note: this overrides color_up"),
("start_color", "Hex or English name for start of color range, eg '#00FF00' or 'green'"),
("end_color", "Hex or English name for end of color range, eg '#FF0000' or 'red'"),
("graph_width", "Width of the network traffic graph"),
("graph_style", "Graph style ('blocks', 'braille-fill', 'braille-peak', or 'braille-snake')"),
("upper_limit",
"Expected max kb/s. This value controls how the network traffic graph is drawn and in what color"),
("graph_type", "Whether to draw the network traffic graph for input or output. "
"Allowed values 'input' or 'output'"),
("divisor", "divide all byte values by this value"),
("ignore_interfaces", "Array of interfaces to ignore when cycling through "
"on click, eg, ['lo']"),
("round_size", "defines number of digits in round"),
("detached_down", "If the interface doesn't exist, display it as if it were down"),
("unknown_up", "If the interface is in unknown state, display it as if it were up"),
)
interval = 1
interface = 'eth0'
format_up = "{interface} {network_graph}{kbs}KB/s"
format_down = "{interface}: DOWN"
color_up = "#00FF00"
color_down = "#FF0000"
dynamic_color = True
graph_type = 'input'
graph_width = 15
graph_style = 'blocks'
upper_limit = 150.0
# Network traffic settings
divisor = 1024
round_size = None
# Network info settings
detached_down = True
unknown_up = False
ignore_interfaces = ["lo"]
on_leftclick = "nm-connection-editor"
on_rightclick = "cycle_interface"
on_upscroll = ['cycle_interface', 1]
on_downscroll = ['cycle_interface', -1]
def init(self):
# Don't require importing basiciw unless using the functionality it offers.
if any(s in self.format_up or s in self.format_up for s in
['essid', 'freq', 'quality', 'quality_bar']):
get_wifi_info = True
else:
get_wifi_info = False
self.network_info = NetworkInfo(self.interface, self.ignore_interfaces, self.detached_down, self.unknown_up,
get_wifi_info)
# Don't require importing psutil unless using the functionality it offers.
if any(s in self.format_up or s in self.format_down for s in
['bytes_sent', 'bytes_recv', 'packets_sent', 'packets_recv', 'network_graph', 'kbs']):
self.network_traffic = NetworkTraffic(self.unknown_up, self.divisor, self.round_size)
else:
self.network_traffic = None
if not self.dynamic_color:
self.end_color = self.start_color
self.colors = self.get_hex_color_range(self.start_color, self.end_color, int(self.upper_limit))
self.kbs_arr = [0.0] * self.graph_width
def cycle_interface(self, increment=1):
interfaces = [i for i in netifaces.interfaces() if i not in self.ignore_interfaces]
if self.interface in interfaces:
next_index = (interfaces.index(self.interface) + increment) % len(interfaces)
self.interface = interfaces[next_index]
elif len(interfaces) > 0:
self.interface = interfaces[0]
if self.network_traffic:
self.network_traffic.clear_counters()
self.kbs_arr = [0.0] * self.graph_width
def get_network_graph(self, kbs):
# Cycle array by inserting at the start and chopping off the last element
self.kbs_arr.insert(0, kbs)
self.kbs_arr = self.kbs_arr[:self.graph_width]
return make_graph(self.kbs_arr, 0.0, self.upper_limit, self.graph_style)
def run(self):
format_values = dict(kbs="", network_graph="", bytes_sent="", bytes_recv="", packets_sent="", packets_recv="",
interface="", v4="", v4mask="", v4cidr="", v6="", v6mask="", v6cidr="", mac="",
essid="", freq="", quality="", quality_bar="")
if self.network_traffic:
network_usage = self.network_traffic.get_usage(self.interface)
format_values.update(network_usage)
if self.graph_type == 'input':
kbs = network_usage['bytes_recv']
elif self.graph_type == 'output':
kbs = network_usage['bytes_sent']
else:
raise Exception("graph_type must be either 'input' or 'output'!")
format_values['network_graph'] = self.get_network_graph(kbs)
format_values['kbs'] = "{0:.1f}".format(round(kbs, 2)).rjust(6)
color = self.get_gradient(kbs, self.colors, self.upper_limit)
else:
color = None
if sysfs_interface_up(self.interface, self.unknown_up):
if not color:
color = self.color_up
format_str = self.format_up
else:
color = self.color_down
format_str = self.format_down
network_info = self.network_info.get_info(self.interface)
format_values.update(network_info)
format_values['interface'] = self.interface
self.output = {
"full_text": format_str.format(**format_values),
'color': color,
}
|
def count_bits(integer):
bits = 0
|
memory.rs
|
use crate::bootboot::{self, MMapEnt};
use crate::{
kdbg_ctx,
light::{kdebug, utils},
};
use core::marker::PhantomData;
use x86_64::{
structures::paging::{FrameAllocator, PageSize, PhysFrame, Size4KiB},
PhysAddr,
};
/// The size of the physical frame to allocate.
pub type FrameSize = Size4KiB;
type Region = (PhysAddr, PhysAddr);
pub static mut FRAME_ALLOCATOR: Option<BootInfoBumpFrameAllocator> = None;
/// Parse the memory map given by the bootloader.
/// ### Safety
/// This function is unsafe, because the caller must ensure that the memory
/// map from the bootloader is properly initialized.
pub unsafe fn parse_memory_map() -> &'static [MMapEnt] {
let num_mmap_entries = ((bootboot::bootboot.size - 128) / 16) as isize;
core::slice::from_raw_parts(
&bootboot::bootboot.mmap as *const bootboot::MMapEnt,
num_mmap_entries as usize,
)
}
/// Get the current frame allocator. Panic if it is not initialized.
/// ### Safety
/// This function is unsafe, because it leaks unprotected mutable
/// references to the allocator.
pub unsafe fn
|
() -> &'static mut BootInfoBumpFrameAllocator {
FRAME_ALLOCATOR.as_mut().expect("Allocator is None")
}
/// Initialize a bump physical frame allocator. The free frames are
/// found via the memory map that the bootloader passes to the kernel.
/// The size of the allocated frame is set to [`FrameSize`].
/// ### Safety
/// This function must be called once and by only one processor.
pub unsafe fn init(memory_map: &'static [bootboot::MMapEnt]) {
FRAME_ALLOCATOR = Some(BootInfoBumpFrameAllocator {
memory_map,
next: 0,
size: PhantomData::<FrameSize>,
})
}
impl bootboot::MMapEnt {
pub fn is_usable(&self) -> bool {
(self.size & 0xF) == 1
}
pub fn get_size(&self) -> u64 {
self.size & !0xF
}
fn to_region(&self) -> Region {
(
PhysAddr::new(self.ptr),
PhysAddr::new(self.ptr + Self::get_size(self)),
)
}
}
pub struct BootInfoBumpFrameAllocator<S: PageSize = FrameSize> {
memory_map: &'static [bootboot::MMapEnt],
next: usize,
size: PhantomData<S>,
}
impl<S: PageSize> BootInfoBumpFrameAllocator<S> {
/// Print the memory regions of this physical frame allocator. These
/// correspond to the regions that the bootloader passed to us.
pub fn print_memory_regions(&self) {
let mut buf = [0u8; 8];
for e in self.memory_map {
kdbg_ctx!(
kdebug::print(b"Region: ")
utils::int_to_bytes_hex(e.ptr, &mut buf)
kdebug::print(&buf)
kdebug::print(b" size: ")
utils::int_to_bytes_hex(e.get_size(), &mut buf)
kdebug::print(&buf)
kdebug::print(b"\n")
)
}
}
fn squeeze_region_bounds((start, end): Region) -> Region {
(start.align_up(S::SIZE), end.align_down(S::SIZE))
}
fn usable_frames(&self) -> impl Iterator<Item = PhysFrame<S>> {
self.memory_map
.iter()
.filter(|&e| e.is_usable())
.map(|e| e.to_region())
.map(Self::squeeze_region_bounds)
.map(|r| r.0.as_u64()..r.1.as_u64())
.flat_map(|r| r.step_by(S::SIZE as usize))
.map(|a| PhysFrame::containing_address(PhysAddr::new(a)))
}
}
unsafe impl<S: PageSize> FrameAllocator<S> for BootInfoBumpFrameAllocator<S> {
fn allocate_frame(&mut self) -> Option<PhysFrame<S>> {
let frame = self.usable_frames().nth(self.next);
self.next += 1;
frame
}
}
|
get_frame_allocator
|
publish.go
|
package commands
import (
"errors"
"fmt"
cmds "github.com/jbenet/go-ipfs/commands"
core "github.com/jbenet/go-ipfs/core"
crypto "github.com/jbenet/go-ipfs/crypto"
nsys "github.com/jbenet/go-ipfs/namesys"
u "github.com/jbenet/go-ipfs/util"
)
var errNotOnline = errors.New("This command must be run in online mode. Try running 'ipfs daemon' first.")
var publishCmd = &cmds.Command{
Helptext: cmds.HelpText{
Tagline: "Publish an object to IPNS",
ShortDescription: `
IPNS is a PKI namespace, where names are the hashes of public keys, and
the private key enables publishing new (signed) values. In publish, the
default value of <name> is your own identity public key.
`,
LongDescription: `
IPNS is a PKI namespace, where names are the hashes of public keys, and
the private key enables publishing new (signed) values. In publish, the
default value of <name> is your own identity public key.
Examples:
Publish a <ref> to your identity name:
> ipfs name publish QmatmE9msSfkKxoffpHwNLNKgwZG8eT9Bud6YoPab52vpy
published name QmbCMUZw6JFeZ7Wp9jkzbye3Fzp2GGcPgC3nmeUjfVF87n to QmatmE9msSfkKxoffpHwNLNKgwZG8eT9Bud6YoPab52vpy
Publish a <ref> to another public key:
> ipfs name publish QmbCMUZw6JFeZ7Wp9jkzbye3Fzp2GGcPgC3nmeUjfVF87n QmatmE9msSfkKxoffpHwNLNKgwZG8eT9Bud6YoPab52vpy
published name QmbCMUZw6JFeZ7Wp9jkzbye3Fzp2GGcPgC3nmeUjfVF87n to QmatmE9msSfkKxoffpHwNLNKgwZG8eT9Bud6YoPab52vpy
`,
},
Arguments: []cmds.Argument{
cmds.StringArg("name", false, false, "The IPNS name to publish to. Defaults to your node's peerID"),
cmds.StringArg("ipfs-path", true, false, "IPFS path of the obejct to be published at <name>"),
},
Run: func(req cmds.Request) (interface{}, error) {
log.Debug("Begin Publish")
n, err := req.Context().GetNode()
if err != nil {
return nil, err
}
args := req.Arguments()
if n.Network == nil {
return nil, errNotOnline
}
if n.Identity == nil {
return nil, errors.New("Identity not loaded!")
}
// name := ""
ref := ""
switch len(args) {
case 2:
// name = args[0]
ref = args[1]
return nil, errors.New("keychains not yet implemented")
case 1:
// name = n.Identity.ID.String()
ref = args[0]
}
// TODO n.Keychain.Get(name).PrivKey
k := n.Identity.PrivKey()
return publish(n, k, ref)
},
Marshalers: cmds.MarshalerMap{
cmds.Text: func(res cmds.Response) ([]byte, error) {
v := res.Output().(*IpnsEntry)
s := fmt.Sprintf("Published name %s to %s\n", v.Name, v.Value)
return []byte(s), nil
},
},
Type: &IpnsEntry{},
}
func
|
(n *core.IpfsNode, k crypto.PrivKey, ref string) (*IpnsEntry, error) {
pub := nsys.NewRoutingPublisher(n.Routing)
err := pub.Publish(k, ref)
if err != nil {
return nil, err
}
hash, err := k.GetPublic().Hash()
if err != nil {
return nil, err
}
return &IpnsEntry{
Name: u.Key(hash).String(),
Value: ref,
}, nil
}
|
publish
|
waitgroup_test.go
|
package test
import (
"sync"
"testing"
)
var wg sync.WaitGroup
func TestWaitGroup(t *testing.T)
|
{
wg.Wait()
}
|
|
build.rs
|
use std::path::PathBuf;
fn main()
|
{
let includes: &[PathBuf] = &[
PathBuf::from("deps").join("RED4ext.SDK").join("include"),
PathBuf::from("deps").join("glue"),
];
cxx_build::bridge("src/lib.rs")
.includes(includes)
.flag("-std:c++20")
.compile("red4ext-rs");
println!("cargo:rerun-if-changed=src/lib.rs");
println!("cargo:rerun-if-changed=deps/glue/glue.hpp");
}
|
|
pooldata.py
|
import pytest
import brownie
from brownie import Contract, ZERO_ADDRESS
# gusd
gusd_token_address = "0xD2967f45c4f384DEEa880F807Be904762a3DeA07"
gusd_gauge_addresses = "0xC5cfaDA84E902aD92DD40194f0883ad49639b023"
# susd
susd_token_address = '0xC25a3A3b969415c80451098fa907EC722572917F'
susd_gauge_address = '0xA90996896660DEcC6E997655E065b23788857849'
@pytest.fixture(scope="module")
def swap_address(pool_data):
return pool_data['swap_address']
@pytest.fixture(scope="module")
def token_address(pool_data):
return pool_data['lp_token_address']
@pytest.fixture(scope="module")
def gauge_address(pool_data):
return pool_data['gauge_addresses'][0]
@pytest.fixture(scope="module")
def deposit_address(pool_data):
return pool_data['zap_address'] if 'zap_address' in pool_data else pool_data['swap_address']
@pytest.fixture(scope="module")
def other_token_address(pool_data):
return gusd_token_address if gusd_token_address != pool_data["lp_token_address"] else susd_token_address
@pytest.fixture(scope="module")
def other_gauge_address(pool_data):
return gusd_gauge_addresses if gusd_gauge_addresses != pool_data["gauge_addresses"][0] else susd_gauge_address
@pytest.fixture(scope="module")
def gauge(gauge_address):
return Contract(gauge_address)
@pytest.fixture(scope="module")
def underlying_decimals(pool_data, base_pool_data):
# number of decimal places for each underlying coin in the active pool
decimals = [i.get("decimals", i.get("wrapped_decimals")) for i in pool_data["coins"]]
if base_pool_data is None:
return decimals
base_decimals = [i.get("decimals", i.get("wrapped_decimals")) for i in base_pool_data["coins"]]
return decimals[:-1] + base_decimals
@pytest.fixture(scope="module")
def wrapped_decimals(pool_data):
# number of decimal places for each wrapped coin in the active pool
yield [i.get("wrapped_decimals", i.get("decimals")) for i in pool_data["coins"]]
@pytest.fixture(scope="module")
def wrapped_amounts_to_mint(wrapped_decimals):
return [100 * 10 ** i for i in wrapped_decimals]
@pytest.fixture(scope="module")
def underlying_amounts_to_mint(underlying_decimals):
return [100 * 10 ** i for i in underlying_decimals]
@pytest.fixture(scope="module")
def wrong_amounts_to_mint():
|
# Different amounts are needed to always pass test_wrong_order_of_coins
@pytest.fixture(scope="module")
def wrapped_amounts(wrapped_decimals, n_coins_wrapped):
return [(10 + i) * 10 ** wrapped_decimals[i] for i in range(n_coins_wrapped)] + [0] * (5 - n_coins_wrapped)
# Different amounts are needed to always pass test_wrong_order_of_coins
@pytest.fixture(scope="module")
def underlying_amounts(underlying_decimals, n_coins_underlying):
return [(10 + i) * 10 ** underlying_decimals[i] for i in range(n_coins_underlying)] + [0] * (5 - n_coins_underlying)
@pytest.fixture(scope="module")
def n_coins_wrapped(wrapped_decimals):
return len(wrapped_decimals)
@pytest.fixture(scope="module")
def n_coins_underlying(underlying_decimals):
yield len(underlying_decimals)
@pytest.fixture(scope="module")
def value_wrapped(wrapped_amounts, wrapped_coins):
return wrapped_amounts[wrapped_coins.index(brownie.ETH_ADDRESS)] if brownie.ETH_ADDRESS in wrapped_coins else 0
@pytest.fixture(scope="module")
def value_underlying(underlying_amounts, underlying_coins):
return underlying_amounts[underlying_coins.index(brownie.ETH_ADDRESS)] if brownie.ETH_ADDRESS in underlying_coins else 0
@pytest.fixture(scope="module")
def use_underlying(pool_data):
if pool_data['swap_address'] in [
"0xDeBF20617708857ebe4F679508E7b7863a8A8EeE", # aave
"0xeb16ae0052ed37f479f7fe63849198df1765a733", # saave
"0x2dded6Da1BF5DBdF597C45fcFaa3194e53EcfeAF", # ib
"0x8301AE4fc9c624d1D396cbDAa1ed877821D7C511", # crveth (use_eth)
"0xB576491F1E6e5E62f1d8F26062Ee822B40B0E0d4", # cvxeth (use_eth)
]:
return True
return False
@pytest.fixture(scope="module")
def is_meta(pool_data):
return "meta" in pool_data.get("pool_types", [])
@pytest.fixture(scope="module")
def factory_pool_address(pool_data):
return pool_data["swap_address"] if "factory" in pool_data.get("pool_types", []) else ZERO_ADDRESS
|
return [100 * 10 ** 18] * 5
|
jquery.inputmask.min.js
|
/*!
|
* Version: 5.0.6-beta.54
*/
!function(e,t){if("object"==typeof exports&&"object"==typeof module)module.exports=t(require("jquery"));else if("function"==typeof define&&define.amd)define(["jquery"],t);else{var a="object"==typeof exports?t(require("jquery")):t(e.jQuery);for(var i in a)("object"==typeof exports?exports:e)[i]=a[i]}}(this,(function(e){return function(){"use strict";var t={4528:function(e){e.exports=JSON.parse('{"BACKSPACE":8,"BACKSPACE_SAFARI":127,"DELETE":46,"DOWN":40,"END":35,"ENTER":13,"ESCAPE":27,"HOME":36,"INSERT":45,"LEFT":37,"PAGE_DOWN":34,"PAGE_UP":33,"RIGHT":39,"SPACE":32,"TAB":9,"UP":38,"X":88,"Z":90,"CONTROL":17,"PAUSE/BREAK":19,"WINDOWS_LEFT":91,"WINDOWS_RIGHT":92,"KEY_229":229}')},3046:function(e,t,a){var i;Object.defineProperty(t,"__esModule",{value:!0}),t.default=void 0,a(3851),a(219),a(207),a(5296);var n=((i=a(2394))&&i.__esModule?i:{default:i}).default;t.default=n},8741:function(e,t){Object.defineProperty(t,"__esModule",{value:!0}),t.default=void 0;var a=!("undefined"==typeof window||!window.document||!window.document.createElement);t.default=a},3976:function(e,t,a){Object.defineProperty(t,"__esModule",{value:!0}),t.default=void 0;var i,n=(i=a(4528))&&i.__esModule?i:{default:i};var r={_maxTestPos:500,placeholder:"_",optionalmarker:["[","]"],quantifiermarker:["{","}"],groupmarker:["(",")"],alternatormarker:"|",escapeChar:"\\",mask:null,regex:null,oncomplete:function(){},onincomplete:function(){},oncleared:function(){},repeat:0,greedy:!1,autoUnmask:!1,removeMaskOnSubmit:!1,clearMaskOnLostFocus:!0,insertMode:!0,insertModeVisual:!0,clearIncomplete:!1,alias:null,onKeyDown:function(){},onBeforeMask:null,onBeforePaste:function(e,t){return"function"==typeof t.onBeforeMask?t.onBeforeMask.call(this,e,t):e},onBeforeWrite:null,onUnMask:null,showMaskOnFocus:!0,showMaskOnHover:!0,onKeyValidation:function(){},skipOptionalPartCharacter:" ",numericInput:!1,rightAlign:!1,undoOnEscape:!0,radixPoint:"",_radixDance:!1,groupSeparator:"",keepStatic:null,positionCaretOnTab:!0,tabThrough:!1,supportsInputType:["text","tel","url","password","search"],ignorables:[n.default.BACKSPACE,n.default.TAB,n.default["PAUSE/BREAK"],n.default.ESCAPE,n.default.PAGE_UP,n.default.PAGE_DOWN,n.default.END,n.default.HOME,n.default.LEFT,n.default.UP,n.default.RIGHT,n.default.DOWN,n.default.INSERT,n.default.DELETE,93,112,113,114,115,116,117,118,119,120,121,122,123,0,229],isComplete:null,preValidation:null,postValidation:null,staticDefinitionSymbol:void 0,jitMasking:!1,nullable:!0,inputEventOnly:!1,noValuePatching:!1,positionCaretOnClick:"lvp",casing:null,inputmode:"text",importDataAttributes:!0,shiftPositions:!0,usePrototypeDefinitions:!0,validationEventTimeOut:3e3};t.default=r},7392:function(e,t){Object.defineProperty(t,"__esModule",{value:!0}),t.default=void 0;t.default={9:{validator:"[0-9\uff10-\uff19]",definitionSymbol:"*"},a:{validator:"[A-Za-z\u0410-\u044f\u0401\u0451\xc0-\xff\xb5]",definitionSymbol:"*"},"*":{validator:"[0-9\uff10-\uff19A-Za-z\u0410-\u044f\u0401\u0451\xc0-\xff\xb5]"}}},3287:function(e,t,a){Object.defineProperty(t,"__esModule",{value:!0}),t.default=void 0;var i,n=(i=a(8254))&&i.__esModule?i:{default:i};if(void 0===n.default)throw"jQuery not loaded!";var r=n.default;t.default=r},9845:function(e,t,a){Object.defineProperty(t,"__esModule",{value:!0}),t.iphone=t.iemobile=t.mobile=t.ie=t.ua=void 0;var i,n=(i=a(9380))&&i.__esModule?i:{default:i};var r=n.default.navigator&&n.default.navigator.userAgent||"",o=r.indexOf("MSIE ")>0||r.indexOf("Trident/")>0,s="ontouchstart"in n.default,l=/iemobile/i.test(r),u=/iphone/i.test(r)&&!l;t.iphone=u,t.iemobile=l,t.mobile=s,t.ie=o,t.ua=r},7184:function(e,t){Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e){return e.replace(a,"\\$1")};var a=new RegExp("(\\"+["/",".","*","+","?","|","(",")","[","]","{","}","\\","$","^"].join("|\\")+")","gim")},6030:function(e,t,a){Object.defineProperty(t,"__esModule",{value:!0}),t.EventHandlers=void 0;var i,n=a(8711),r=(i=a(4528))&&i.__esModule?i:{default:i},o=a(9845),s=a(7215),l=a(7760),u=a(4713);var c={keydownEvent:function(e){var t=this.inputmask,a=t.opts,i=t.dependencyLib,c=t.maskset,f=this,d=i(f),p=e.keyCode,h=n.caret.call(t,f),m=a.onKeyDown.call(this,e,n.getBuffer.call(t),h,a);if(void 0!==m)return m;if(p===r.default.BACKSPACE||p===r.default.DELETE||o.iphone&&p===r.default.BACKSPACE_SAFARI||e.ctrlKey&&p===r.default.X&&!("oncut"in f))e.preventDefault(),s.handleRemove.call(t,f,p,h),(0,l.writeBuffer)(f,n.getBuffer.call(t,!0),c.p,e,f.inputmask._valueGet()!==n.getBuffer.call(t).join(""));else if(p===r.default.END||p===r.default.PAGE_DOWN){e.preventDefault();var v=n.seekNext.call(t,n.getLastValidPosition.call(t));n.caret.call(t,f,e.shiftKey?h.begin:v,v,!0)}else p===r.default.HOME&&!e.shiftKey||p===r.default.PAGE_UP?(e.preventDefault(),n.caret.call(t,f,0,e.shiftKey?h.begin:0,!0)):a.undoOnEscape&&p===r.default.ESCAPE&&!0!==e.altKey?((0,l.checkVal)(f,!0,!1,t.undoValue.split("")),d.trigger("click")):!0===a.tabThrough&&p===r.default.TAB?!0===e.shiftKey?(h.end=n.seekPrevious.call(t,h.end,!0),!0===u.getTest.call(t,h.end-1).match.static&&h.end--,h.begin=n.seekPrevious.call(t,h.end,!0),h.begin>=0&&h.end>0&&(e.preventDefault(),n.caret.call(t,f,h.begin,h.end))):(h.begin=n.seekNext.call(t,h.begin,!0),h.end=n.seekNext.call(t,h.begin,!0),h.end<c.maskLength&&h.end--,h.begin<=c.maskLength&&(e.preventDefault(),n.caret.call(t,f,h.begin,h.end))):e.shiftKey||a.insertModeVisual&&!1===a.insertMode&&(p===r.default.RIGHT?setTimeout((function(){var e=n.caret.call(t,f);n.caret.call(t,f,e.begin)}),0):p===r.default.LEFT&&setTimeout((function(){var e=n.translatePosition.call(t,f.inputmask.caretPos.begin);n.translatePosition.call(t,f.inputmask.caretPos.end);t.isRTL?n.caret.call(t,f,e+(e===c.maskLength?0:1)):n.caret.call(t,f,e-(0===e?0:1))}),0));t.ignorable=a.ignorables.includes(p)},keypressEvent:function(e,t,a,i,o){var u=this.inputmask||this,c=u.opts,f=u.dependencyLib,d=u.maskset,p=u.el,h=f(p),m=e.which||e.charCode||e.keyCode;if(!(!0===t||e.ctrlKey&&e.altKey)&&(e.ctrlKey||e.metaKey||u.ignorable))return m===r.default.ENTER&&u.undoValue!==u._valueGet(!0)&&(u.undoValue=u._valueGet(!0),setTimeout((function(){h.trigger("change")}),0)),u.skipInputEvent=!0,!0;if(m){44!==m&&46!==m||3!==e.location||""===c.radixPoint||(m=c.radixPoint.charCodeAt(0));var v,g=t?{begin:o,end:o}:n.caret.call(u,p),k=String.fromCharCode(m);d.writeOutBuffer=!0;var y=s.isValid.call(u,g,k,i,void 0,void 0,void 0,t);if(!1!==y&&(n.resetMaskSet.call(u,!0),v=void 0!==y.caret?y.caret:n.seekNext.call(u,y.pos.begin?y.pos.begin:y.pos),d.p=v),v=c.numericInput&&void 0===y.caret?n.seekPrevious.call(u,v):v,!1!==a&&(setTimeout((function(){c.onKeyValidation.call(p,m,y)}),0),d.writeOutBuffer&&!1!==y)){var b=n.getBuffer.call(u);(0,l.writeBuffer)(p,b,v,e,!0!==t)}if(e.preventDefault(),t)return!1!==y&&(y.forwardPosition=v),y}},keyupEvent:function(e){var t=this.inputmask;!t.isComposing||e.keyCode!==r.default.KEY_229&&e.keyCode!==r.default.ENTER||t.$el.trigger("input")},pasteEvent:function(e){var t,a=this.inputmask,i=a.opts,r=a._valueGet(!0),o=n.caret.call(a,this);a.isRTL&&(t=o.end,o.end=o.begin,o.begin=t);var s=r.substr(0,o.begin),u=r.substr(o.end,r.length);if(s==(a.isRTL?n.getBufferTemplate.call(a).slice().reverse():n.getBufferTemplate.call(a)).slice(0,o.begin).join("")&&(s=""),u==(a.isRTL?n.getBufferTemplate.call(a).slice().reverse():n.getBufferTemplate.call(a)).slice(o.end).join("")&&(u=""),window.clipboardData&&window.clipboardData.getData)r=s+window.clipboardData.getData("Text")+u;else{if(!e.clipboardData||!e.clipboardData.getData)return!0;r=s+e.clipboardData.getData("text/plain")+u}var c=r;if("function"==typeof i.onBeforePaste){if(!1===(c=i.onBeforePaste.call(a,r,i)))return e.preventDefault();c||(c=r)}return(0,l.checkVal)(this,!0,!1,c.toString().split(""),e),e.preventDefault()},inputFallBackEvent:function(e){var t=this.inputmask,a=t.opts,i=t.dependencyLib;var s=this,f=s.inputmask._valueGet(!0),d=(t.isRTL?n.getBuffer.call(t).slice().reverse():n.getBuffer.call(t)).join(""),p=n.caret.call(t,s,void 0,void 0,!0);if(d!==f){var h=function(e,i,r){for(var o,s,l,c=e.substr(0,r.begin).split(""),f=e.substr(r.begin).split(""),d=i.substr(0,r.begin).split(""),p=i.substr(r.begin).split(""),h=c.length>=d.length?c.length:d.length,m=f.length>=p.length?f.length:p.length,v="",g=[],k="~";c.length<h;)c.push(k);for(;d.length<h;)d.push(k);for(;f.length<m;)f.unshift(k);for(;p.length<m;)p.unshift(k);var y=c.concat(f),b=d.concat(p);for(s=0,o=y.length;s<o;s++)switch(l=u.getPlaceholder.call(t,n.translatePosition.call(t,s)),v){case"insertText":b[s-1]===y[s]&&r.begin==y.length-1&&g.push(y[s]),s=o;break;case"insertReplacementText":case"deleteContentBackward":y[s]===k?r.end++:s=o;break;default:y[s]!==b[s]&&(y[s+1]!==k&&y[s+1]!==l&&void 0!==y[s+1]||(b[s]!==l||b[s+1]!==k)&&b[s]!==k?b[s+1]===k&&b[s]===y[s+1]?(v="insertText",g.push(y[s]),r.begin--,r.end--):y[s]!==l&&y[s]!==k&&(y[s+1]===k||b[s]!==y[s]&&b[s+1]===y[s+1])?(v="insertReplacementText",g.push(y[s]),r.begin--):y[s]===k?(v="deleteContentBackward",(n.isMask.call(t,n.translatePosition.call(t,s),!0)||b[s]===a.radixPoint)&&r.end++):s=o:(v="insertText",g.push(y[s]),r.begin--,r.end--))}return{action:v,data:g,caret:r}}(f=function(e,a,i){if(o.iemobile){var r=a.replace(n.getBuffer.call(t).join(""),"");if(1===r.length){var s=a.split("");s.splice(i.begin,0,r),a=s.join("")}}return a}(0,f,p),d,p);switch((s.inputmask.shadowRoot||s.ownerDocument).activeElement!==s&&s.focus(),(0,l.writeBuffer)(s,n.getBuffer.call(t)),n.caret.call(t,s,p.begin,p.end,!0),h.action){case"insertText":case"insertReplacementText":h.data.forEach((function(e,a){var n=new i.Event("keypress");n.which=e.charCodeAt(0),t.ignorable=!1,c.keypressEvent.call(s,n)})),setTimeout((function(){t.$el.trigger("keyup")}),0);break;case"deleteContentBackward":var m=new i.Event("keydown");m.keyCode=r.default.BACKSPACE,c.keydownEvent.call(s,m);break;default:(0,l.applyInputValue)(s,f)}e.preventDefault()}},compositionendEvent:function(e){var t=this.inputmask;t.isComposing=!1,t.$el.trigger("input")},setValueEvent:function(e){var t=this.inputmask,a=this,i=e&&e.detail?e.detail[0]:arguments[1];void 0===i&&(i=a.inputmask._valueGet(!0)),(0,l.applyInputValue)(a,i),(e.detail&&void 0!==e.detail[1]||void 0!==arguments[2])&&n.caret.call(t,a,e.detail?e.detail[1]:arguments[2])},focusEvent:function(e){var t=this.inputmask,a=t.opts,i=this,r=i.inputmask._valueGet();a.showMaskOnFocus&&r!==n.getBuffer.call(t).join("")&&(0,l.writeBuffer)(i,n.getBuffer.call(t),n.seekNext.call(t,n.getLastValidPosition.call(t))),!0!==a.positionCaretOnTab||!1!==t.mouseEnter||s.isComplete.call(t,n.getBuffer.call(t))&&-1!==n.getLastValidPosition.call(t)||c.clickEvent.apply(i,[e,!0]),t.undoValue=t._valueGet(!0)},invalidEvent:function(e){this.inputmask.validationEvent=!0},mouseleaveEvent:function(){var e=this.inputmask,t=e.opts,a=this;e.mouseEnter=!1,t.clearMaskOnLostFocus&&(a.inputmask.shadowRoot||a.ownerDocument).activeElement!==a&&(0,l.HandleNativePlaceholder)(a,e.originalPlaceholder)},clickEvent:function(e,t){var a=this.inputmask,i=this;if((i.inputmask.shadowRoot||i.ownerDocument).activeElement===i){var r=n.determineNewCaretPosition.call(a,n.caret.call(a,i),t);void 0!==r&&n.caret.call(a,i,r)}},cutEvent:function(e){var t=this.inputmask,a=t.maskset,i=this,o=n.caret.call(t,i),u=window.clipboardData||e.clipboardData,c=t.isRTL?n.getBuffer.call(t).slice(o.end,o.begin):n.getBuffer.call(t).slice(o.begin,o.end);u.setData("text",t.isRTL?c.reverse().join(""):c.join("")),document.execCommand&&document.execCommand("copy"),s.handleRemove.call(t,i,r.default.DELETE,o),(0,l.writeBuffer)(i,n.getBuffer.call(t),a.p,e,t.undoValue!==t._valueGet(!0))},blurEvent:function(e){var t=this.inputmask,a=t.opts,i=(0,t.dependencyLib)(this),r=this;if(r.inputmask){(0,l.HandleNativePlaceholder)(r,t.originalPlaceholder);var o=r.inputmask._valueGet(),u=n.getBuffer.call(t).slice();""!==o&&(a.clearMaskOnLostFocus&&(-1===n.getLastValidPosition.call(t)&&o===n.getBufferTemplate.call(t).join("")?u=[]:l.clearOptionalTail.call(t,u)),!1===s.isComplete.call(t,u)&&(setTimeout((function(){i.trigger("incomplete")}),0),a.clearIncomplete&&(n.resetMaskSet.call(t),u=a.clearMaskOnLostFocus?[]:n.getBufferTemplate.call(t).slice())),(0,l.writeBuffer)(r,u,void 0,e)),t.undoValue!==t._valueGet(!0)&&(t.undoValue=t._valueGet(!0),i.trigger("change"))}},mouseenterEvent:function(){var e=this.inputmask,t=e.opts,a=this;if(e.mouseEnter=!0,(a.inputmask.shadowRoot||a.ownerDocument).activeElement!==a){var i=(e.isRTL?n.getBufferTemplate.call(e).slice().reverse():n.getBufferTemplate.call(e)).join("");e.placeholder!==i&&a.placeholder!==e.originalPlaceholder&&(e.originalPlaceholder=a.placeholder),t.showMaskOnHover&&(0,l.HandleNativePlaceholder)(a,i)}},submitEvent:function(){var e=this.inputmask,t=e.opts;e.undoValue!==e._valueGet(!0)&&e.$el.trigger("change"),t.clearMaskOnLostFocus&&-1===n.getLastValidPosition.call(e)&&e._valueGet&&e._valueGet()===n.getBufferTemplate.call(e).join("")&&e._valueSet(""),t.clearIncomplete&&!1===s.isComplete.call(e,n.getBuffer.call(e))&&e._valueSet(""),t.removeMaskOnSubmit&&(e._valueSet(e.unmaskedvalue(),!0),setTimeout((function(){(0,l.writeBuffer)(e.el,n.getBuffer.call(e))}),0))},resetEvent:function(){var e=this.inputmask;e.refreshValue=!0,setTimeout((function(){(0,l.applyInputValue)(e.el,e._valueGet(!0))}),0)}};t.EventHandlers=c},9716:function(e,t,a){Object.defineProperty(t,"__esModule",{value:!0}),t.EventRuler=void 0;var i=s(a(2394)),n=s(a(4528)),r=a(8711),o=a(7760);function s(e){return e&&e.__esModule?e:{default:e}}var l={on:function(e,t,a){var s=e.inputmask.dependencyLib,l=function(t){t.originalEvent&&(t=t.originalEvent||t,arguments[0]=t);var l,u=this,c=u.inputmask,f=c?c.opts:void 0;if(void 0===c&&"FORM"!==this.nodeName){var d=s.data(u,"_inputmask_opts");s(u).off(),d&&new i.default(d).mask(u)}else{if(["submit","reset","setvalue"].includes(t.type)||"FORM"===this.nodeName||!(u.disabled||u.readOnly&&!("keydown"===t.type&&t.ctrlKey&&67===t.keyCode||!1===f.tabThrough&&t.keyCode===n.default.TAB))){switch(t.type){case"input":if(!0===c.skipInputEvent||t.inputType&&"insertCompositionText"===t.inputType)return c.skipInputEvent=!1,t.preventDefault();break;case"keydown":c.skipKeyPressEvent=!1,c.skipInputEvent=c.isComposing=t.keyCode===n.default.KEY_229;break;case"keyup":case"compositionend":c.isComposing&&(c.skipInputEvent=!1);break;case"keypress":if(!0===c.skipKeyPressEvent)return t.preventDefault();c.skipKeyPressEvent=!0;break;case"click":case"focus":return c.validationEvent?(c.validationEvent=!1,e.blur(),(0,o.HandleNativePlaceholder)(e,(c.isRTL?r.getBufferTemplate.call(c).slice().reverse():r.getBufferTemplate.call(c)).join("")),setTimeout((function(){e.focus()}),f.validationEventTimeOut),!1):(l=arguments,setTimeout((function(){e.inputmask&&a.apply(u,l)}),0),!1)}var p=a.apply(u,arguments);return!1===p&&(t.preventDefault(),t.stopPropagation()),p}t.preventDefault()}};["submit","reset"].includes(t)?(l=l.bind(e),null!==e.form&&s(e.form).on(t,l)):s(e).on(t,l),e.inputmask.events[t]=e.inputmask.events[t]||[],e.inputmask.events[t].push(l)},off:function(e,t){if(e.inputmask&&e.inputmask.events){var a=e.inputmask.dependencyLib,i=e.inputmask.events;for(var n in t&&((i=[])[t]=e.inputmask.events[t]),i){for(var r=i[n];r.length>0;){var o=r.pop();["submit","reset"].includes(n)?null!==e.form&&a(e.form).off(n,o):a(e).off(n,o)}delete e.inputmask.events[n]}}}};t.EventRuler=l},219:function(e,t,a){var i=l(a(2394)),n=l(a(4528)),r=l(a(7184)),o=a(8711);function s(e){return(s="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function l(e){return e&&e.__esModule?e:{default:e}}var u=i.default.dependencyLib,c=(new Date).getFullYear(),f={d:["[1-9]|[12][0-9]|3[01]",Date.prototype.setDate,"day",Date.prototype.getDate],dd:["0[1-9]|[12][0-9]|3[01]",Date.prototype.setDate,"day",function(){return g(Date.prototype.getDate.call(this),2)}],ddd:[""],dddd:[""],m:["[1-9]|1[012]",Date.prototype.setMonth,"month",function(){return Date.prototype.getMonth.call(this)+1}],mm:["0[1-9]|1[012]",Date.prototype.setMonth,"month",function(){return g(Date.prototype.getMonth.call(this)+1,2)}],mmm:[""],mmmm:[""],yy:["[0-9]{2}",Date.prototype.setFullYear,"year",function(){return g(Date.prototype.getFullYear.call(this),2)}],yyyy:["[0-9]{4}",Date.prototype.setFullYear,"year",function(){return g(Date.prototype.getFullYear.call(this),4)}],h:["[1-9]|1[0-2]",Date.prototype.setHours,"hours",Date.prototype.getHours],hh:["0[1-9]|1[0-2]",Date.prototype.setHours,"hours",function(){return g(Date.prototype.getHours.call(this),2)}],hx:[function(e){return"[0-9]{".concat(e,"}")},Date.prototype.setHours,"hours",function(e){return Date.prototype.getHours}],H:["1?[0-9]|2[0-3]",Date.prototype.setHours,"hours",Date.prototype.getHours],HH:["0[0-9]|1[0-9]|2[0-3]",Date.prototype.setHours,"hours",function(){return g(Date.prototype.getHours.call(this),2)}],Hx:[function(e){return"[0-9]{".concat(e,"}")},Date.prototype.setHours,"hours",function(e){return function(){return g(Date.prototype.getHours.call(this),e)}}],M:["[1-5]?[0-9]",Date.prototype.setMinutes,"minutes",Date.prototype.getMinutes],MM:["0[0-9]|1[0-9]|2[0-9]|3[0-9]|4[0-9]|5[0-9]",Date.prototype.setMinutes,"minutes",function(){return g(Date.prototype.getMinutes.call(this),2)}],s:["[1-5]?[0-9]",Date.prototype.setSeconds,"seconds",Date.prototype.getSeconds],ss:["0[0-9]|1[0-9]|2[0-9]|3[0-9]|4[0-9]|5[0-9]",Date.prototype.setSeconds,"seconds",function(){return g(Date.prototype.getSeconds.call(this),2)}],l:["[0-9]{3}",Date.prototype.setMilliseconds,"milliseconds",function(){return g(Date.prototype.getMilliseconds.call(this),3)}],L:["[0-9]{2}",Date.prototype.setMilliseconds,"milliseconds",function(){return g(Date.prototype.getMilliseconds.call(this),2)}],t:["[ap]"],tt:["[ap]m"],T:["[AP]"],TT:["[AP]M"],Z:[""],o:[""],S:[""]},d={isoDate:"yyyy-mm-dd",isoTime:"HH:MM:ss",isoDateTime:"yyyy-mm-dd'T'HH:MM:ss",isoUtcDateTime:"UTC:yyyy-mm-dd'T'HH:MM:ss'Z'"};function p(e){var t=new RegExp("\\d+$").exec(e[0]);if(t&&void 0!==t[0]){var a=f[e[0][0]+"x"].slice("");return a[0]=a[0](t[0]),a[3]=a[3](t[0]),a}if(f[e[0]])return f[e[0]]}function h(e){if(!e.tokenizer){var t=[],a=[];for(var i in f)if(/\.*x$/.test(i)){var n=i[0]+"\\d+";-1===a.indexOf(n)&&a.push(n)}else-1===t.indexOf(i[0])&&t.push(i[0]);e.tokenizer="("+(a.length>0?a.join("|")+"|":"")+t.join("+|")+")+?|.",e.tokenizer=new RegExp(e.tokenizer,"g")}return e.tokenizer}function m(e,t,a){if(void 0===e.rawday||!isFinite(e.rawday)&&new Date(e.date.getFullYear(),isFinite(e.rawmonth)?e.month:e.date.getMonth()+1,0).getDate()>=e.day||"29"==e.day&&!Number.isFinite(e.rawyear)||new Date(e.date.getFullYear(),isFinite(e.rawmonth)?e.month:e.date.getMonth()+1,0).getDate()>=e.day)return t;if("29"==e.day){var i=b(t.pos,a);if("yyyy"===i.targetMatch[0]&&t.pos-i.targetMatchIndex==2)return t.remove=t.pos+1,t}else if("02"==e.month&&"30"==e.day&&void 0!==t.c)return e.day="03",e.date.setDate(3),e.date.setMonth(1),t.insert=[{pos:t.pos,c:"0"},{pos:t.pos+1,c:t.c}],t.caret=o.seekNext.call(this,t.pos+1),t;return!1}function v(e,t,a,i){var n,o,s="";for(h(a).lastIndex=0;n=h(a).exec(e);){if(void 0===t)if(o=p(n))s+="("+o[0]+")";else switch(n[0]){case"[":s+="(";break;case"]":s+=")?";break;default:s+=(0,r.default)(n[0])}else if(o=p(n))if(!0!==i&&o[3])s+=o[3].call(t.date);else o[2]?s+=t["raw"+o[2]]:s+=n[0];else s+=n[0]}return s}function g(e,t,a){for(e=String(e),t=t||2;e.length<t;)e=a?e+"0":"0"+e;return e}function k(e,t,a){var i,n,r,o={date:new Date(1,0,1)},l=e;function u(e,t,a){if(e[i]=t.replace(/[^0-9]/g,"0"),e["raw"+i]=t,void 0!==r){var n=e[i];("day"===i&&29===parseInt(n)||"month"===i&&2===parseInt(n))&&(29!==parseInt(e.day)||2!==parseInt(e.month)||""!==e.year&&void 0!==e.year||e.date.setFullYear(2012,1,29)),"day"===i&&0===parseInt(n)&&(n=1),"month"===i&&(n=parseInt(n))>0&&(n-=1),"year"===i&&n.length<4&&(n=g(n,4,!0)),""===n||isNaN(n)||r.call(e.date,n)}}if("string"==typeof l){for(h(a).lastIndex=0;n=h(a).exec(t);){var c=new RegExp("\\d+$").exec(n[0]),d=c?n[0][0]+"x":n[0],p=void 0;if(c){var m=h(a).lastIndex,v=b(n.index,a);h(a).lastIndex=m,p=l.slice(0,l.indexOf(v.nextMatch[0]))}else p=l.slice(0,d.length);Object.prototype.hasOwnProperty.call(f,d)&&(i=f[d][2],r=f[d][1],u(o,p)),l=l.slice(p.length)}return o}if(l&&"object"===s(l)&&Object.prototype.hasOwnProperty.call(l,"date"))return l}function y(e,t){return v(t.inputFormat,{date:e},t)}function b(e,t){var a,i,n=0,r=0;for(h(t).lastIndex=0;i=h(t).exec(t.inputFormat);){var o=new RegExp("\\d+$").exec(i[0]);if((n+=r=o?parseInt(o[0]):i[0].length)>=e){a=i,i=h(t).exec(t.inputFormat);break}}return{targetMatchIndex:n-r,nextMatch:i,targetMatch:a}}i.default.extendAliases({datetime:{mask:function(e){return e.numericInput=!1,f.S=e.i18n.ordinalSuffix.join("|"),e.inputFormat=d[e.inputFormat]||e.inputFormat,e.displayFormat=d[e.displayFormat]||e.displayFormat||e.inputFormat,e.outputFormat=d[e.outputFormat]||e.outputFormat||e.inputFormat,e.placeholder=""!==e.placeholder?e.placeholder:e.inputFormat.replace(/[[\]]/,""),e.regex=v(e.inputFormat,void 0,e),e.min=k(e.min,e.inputFormat,e),e.max=k(e.max,e.inputFormat,e),null},placeholder:"",inputFormat:"isoDateTime",displayFormat:void 0,outputFormat:void 0,min:null,max:null,skipOptionalPartCharacter:"",i18n:{dayNames:["Mon","Tue","Wed","Thu","Fri","Sat","Sun","Monday","Tuesday","Wednesday","Thursday","Friday","Saturday","Sunday"],monthNames:["Jan","Feb","Mar","Apr","May","Jun","Jul","Aug","Sep","Oct","Nov","Dec","January","February","March","April","May","June","July","August","September","October","November","December"],ordinalSuffix:["st","nd","rd","th"]},preValidation:function(e,t,a,i,n,r,o,s){if(s)return!0;if(isNaN(a)&&e[t]!==a){var l=b(t,n);if(l.nextMatch&&l.nextMatch[0]===a&&l.targetMatch[0].length>1){var u=f[l.targetMatch[0]][0];if(new RegExp(u).test("0"+e[t-1]))return e[t]=e[t-1],e[t-1]="0",{fuzzy:!0,buffer:e,refreshFromBuffer:{start:t-1,end:t+1},pos:t+1}}}return!0},postValidation:function(e,t,a,i,n,r,o,s){var l,u;if(o)return!0;if(!1===i&&(((l=b(t+1,n)).targetMatch&&l.targetMatchIndex===t&&l.targetMatch[0].length>1&&void 0!==f[l.targetMatch[0]]||(l=b(t+2,n)).targetMatch&&l.targetMatchIndex===t+1&&l.targetMatch[0].length>1&&void 0!==f[l.targetMatch[0]])&&(u=f[l.targetMatch[0]][0]),void 0!==u&&(void 0!==r.validPositions[t+1]&&new RegExp(u).test(a+"0")?(e[t]=a,e[t+1]="0",i={pos:t+2,caret:t}):new RegExp(u).test("0"+a)&&(e[t]="0",e[t+1]=a,i={pos:t+2})),!1===i))return i;if(i.fuzzy&&(e=i.buffer,t=i.pos),(l=b(t,n)).targetMatch&&l.targetMatch[0]&&void 0!==f[l.targetMatch[0]]){u=f[l.targetMatch[0]][0];var d=e.slice(l.targetMatchIndex,l.targetMatchIndex+l.targetMatch[0].length);!1===new RegExp(u).test(d.join(""))&&2===l.targetMatch[0].length&&r.validPositions[l.targetMatchIndex]&&r.validPositions[l.targetMatchIndex+1]&&(r.validPositions[l.targetMatchIndex+1].input="0")}var p=i,h=k(e.join(""),n.inputFormat,n);return p&&h.date.getTime()==h.date.getTime()&&(n.prefillYear&&(p=function(e,t,a){if(e.year!==e.rawyear){var i=c.toString(),n=e.rawyear.replace(/[^0-9]/g,""),r=i.slice(0,n.length),o=i.slice(n.length);if(2===n.length&&n===r){var s=new Date(c,e.month-1,e.day);e.day==s.getDate()&&(!a.max||a.max.date.getTime()>=s.getTime())&&(e.date.setFullYear(c),e.year=i,t.insert=[{pos:t.pos+1,c:o[0]},{pos:t.pos+2,c:o[1]}])}}return t}(h,p,n)),p=function(e,t,a,i,n){if(!t)return t;if(a.min){if(e.rawyear){var r,o=e.rawyear.replace(/[^0-9]/g,""),s=a.min.year.substr(0,o.length);if(o<s){var l=b(t.pos,a);if(o=e.rawyear.substr(0,t.pos-l.targetMatchIndex+1).replace(/[^0-9]/g,"0"),(s=a.min.year.substr(0,o.length))<=o)return t.remove=l.targetMatchIndex+o.length,t;if(o="yyyy"===l.targetMatch[0]?e.rawyear.substr(1,1):e.rawyear.substr(0,1),s=a.min.year.substr(2,1),r=a.max?a.max.year.substr(2,1):o,1===o.length&&s<=o&&o<=r&&!0!==n)return"yyyy"===l.targetMatch[0]?(t.insert=[{pos:t.pos+1,c:o,strict:!0}],t.caret=t.pos+2,i.validPositions[t.pos].input=a.min.year[1]):(t.insert=[{pos:t.pos+1,c:a.min.year[1],strict:!0},{pos:t.pos+2,c:o,strict:!0}],t.caret=t.pos+3,i.validPositions[t.pos].input=a.min.year[0]),t;t=!1}}t&&e.year&&e.year===e.rawyear&&a.min.date.getTime()==a.min.date.getTime()&&(t=a.min.date.getTime()<=e.date.getTime())}return t&&a.max&&a.max.date.getTime()==a.max.date.getTime()&&(t=a.max.date.getTime()>=e.date.getTime()),t}(h,p=m.call(this,h,p,n),n,r,s)),void 0!==t&&p&&i.pos!==t?{buffer:v(n.inputFormat,h,n).split(""),refreshFromBuffer:{start:t,end:i.pos},pos:i.caret||i.pos}:p},onKeyDown:function(e,t,a,i){e.ctrlKey&&e.keyCode===n.default.RIGHT&&(this.inputmask._valueSet(y(new Date,i)),u(this).trigger("setvalue"))},onUnMask:function(e,t,a){return t?v(a.outputFormat,k(e,a.inputFormat,a),a,!0):t},casing:function(e,t,a,i){return 0==t.nativeDef.indexOf("[ap]")?e.toLowerCase():0==t.nativeDef.indexOf("[AP]")?e.toUpperCase():e},onBeforeMask:function(e,t){return"[object Date]"===Object.prototype.toString.call(e)&&(e=y(e,t)),e},insertMode:!1,shiftPositions:!1,keepStatic:!1,inputmode:"numeric",prefillYear:!0}})},3851:function(e,t,a){var i,n=(i=a(2394))&&i.__esModule?i:{default:i},r=a(8711),o=a(4713);n.default.extendDefinitions({A:{validator:"[A-Za-z\u0410-\u044f\u0401\u0451\xc0-\xff\xb5]",casing:"upper"},"&":{validator:"[0-9A-Za-z\u0410-\u044f\u0401\u0451\xc0-\xff\xb5]",casing:"upper"},"#":{validator:"[0-9A-Fa-f]",casing:"upper"}});var s=new RegExp("25[0-5]|2[0-4][0-9]|[01][0-9][0-9]");function l(e,t,a,i,n){return a-1>-1&&"."!==t.buffer[a-1]?(e=t.buffer[a-1]+e,e=a-2>-1&&"."!==t.buffer[a-2]?t.buffer[a-2]+e:"0"+e):e="00"+e,s.test(e)}n.default.extendAliases({cssunit:{regex:"[+-]?[0-9]+\\.?([0-9]+)?(px|em|rem|ex|%|in|cm|mm|pt|pc)"},url:{regex:"(https?|ftp)://.*",autoUnmask:!1,keepStatic:!1,tabThrough:!0},ip:{mask:"i[i[i]].j[j[j]].k[k[k]].l[l[l]]",definitions:{i:{validator:l},j:{validator:l},k:{validator:l},l:{validator:l}},onUnMask:function(e,t,a){return e},inputmode:"numeric"},email:{mask:"*{1,64}[.*{1,64}][.*{1,64}][.*{1,63}]@-{1,63}.-{1,63}[.-{1,63}][.-{1,63}]",greedy:!1,casing:"lower",onBeforePaste:function(e,t){return(e=e.toLowerCase()).replace("mailto:","")},definitions:{"*":{validator:"[0-9\uff11-\uff19A-Za-z\u0410-\u044f\u0401\u0451\xc0-\xff\xb5!#$%&'*+/=?^_`{|}~-]"},"-":{validator:"[0-9A-Za-z-]"}},onUnMask:function(e,t,a){return e},inputmode:"email"},mac:{mask:"##:##:##:##:##:##"},vin:{mask:"V{13}9{4}",definitions:{V:{validator:"[A-HJ-NPR-Za-hj-npr-z\\d]",casing:"upper"}},clearIncomplete:!0,autoUnmask:!0},ssn:{mask:"999-99-9999",postValidation:function(e,t,a,i,n,s,l){var u=o.getMaskTemplate.call(this,!0,r.getLastValidPosition.call(this),!0,!0);return/^(?!219-09-9999|078-05-1120)(?!666|000|9.{2}).{3}-(?!00).{2}-(?!0{4}).{4}$/.test(u.join(""))}}})},207:function(e,t,a){var i=s(a(2394)),n=s(a(4528)),r=s(a(7184)),o=a(8711);function s(e){return e&&e.__esModule?e:{default:e}}var l=i.default.dependencyLib;function u(e,t){for(var a="",n=0;n<e.length;n++)i.default.prototype.definitions[e.charAt(n)]||t.definitions[e.charAt(n)]||t.optionalmarker[0]===e.charAt(n)||t.optionalmarker[1]===e.charAt(n)||t.quantifiermarker[0]===e.charAt(n)||t.quantifiermarker[1]===e.charAt(n)||t.groupmarker[0]===e.charAt(n)||t.groupmarker[1]===e.charAt(n)||t.alternatormarker===e.charAt(n)?a+="\\"+e.charAt(n):a+=e.charAt(n);return a}function c(e,t,a,i){if(e.length>0&&t>0&&(!a.digitsOptional||i)){var n=e.indexOf(a.radixPoint),r=!1;a.negationSymbol.back===e[e.length-1]&&(r=!0,e.length--),-1===n&&(e.push(a.radixPoint),n=e.length-1);for(var o=1;o<=t;o++)isFinite(e[n+o])||(e[n+o]="0")}return r&&e.push(a.negationSymbol.back),e}function f(e,t){var a=0;if("+"===e){for(a in t.validPositions);a=o.seekNext.call(this,parseInt(a))}for(var i in t.tests)if((i=parseInt(i))>=a)for(var n=0,r=t.tests[i].length;n<r;n++)if((void 0===t.validPositions[i]||"-"===e)&&t.tests[i][n].match.def===e)return i+(void 0!==t.validPositions[i]&&"-"!==e?1:0);return a}function d(e,t){var a=-1;for(var i in t.validPositions){var n=t.validPositions[i];if(n&&n.match.def===e){a=parseInt(i);break}}return a}function p(e,t,a,i,n){var r=t.buffer?t.buffer.indexOf(n.radixPoint):-1,o=-1!==r&&new RegExp(n.definitions[9].validator).test(e);return n._radixDance&&o&&null==t.validPositions[r]?{insert:{pos:r===a?r+1:r,c:n.radixPoint},pos:a}:o}i.default.extendAliases({numeric:{mask:function(e){e.repeat=0,e.groupSeparator===e.radixPoint&&e.digits&&"0"!==e.digits&&("."===e.radixPoint?e.groupSeparator=",":","===e.radixPoint?e.groupSeparator=".":e.groupSeparator="")," "===e.groupSeparator&&(e.skipOptionalPartCharacter=void 0),e.placeholder.length>1&&(e.placeholder=e.placeholder.charAt(0)),"radixFocus"===e.positionCaretOnClick&&""===e.placeholder&&(e.positionCaretOnClick="lvp");var t="0",a=e.radixPoint;!0===e.numericInput&&void 0===e.__financeInput?(t="1",e.positionCaretOnClick="radixFocus"===e.positionCaretOnClick?"lvp":e.positionCaretOnClick,e.digitsOptional=!1,isNaN(e.digits)&&(e.digits=2),e._radixDance=!1,a=","===e.radixPoint?"?":"!",""!==e.radixPoint&&void 0===e.definitions[a]&&(e.definitions[a]={},e.definitions[a].validator="["+e.radixPoint+"]",e.definitions[a].placeholder=e.radixPoint,e.definitions[a].static=!0,e.definitions[a].generated=!0)):(e.__financeInput=!1,e.numericInput=!0);var i,n="[+]";if(n+=u(e.prefix,e),""!==e.groupSeparator?(void 0===e.definitions[e.groupSeparator]&&(e.definitions[e.groupSeparator]={},e.definitions[e.groupSeparator].validator="["+e.groupSeparator+"]",e.definitions[e.groupSeparator].placeholder=e.groupSeparator,e.definitions[e.groupSeparator].static=!0,e.definitions[e.groupSeparator].generated=!0),n+=e._mask(e)):n+="9{+}",void 0!==e.digits&&0!==e.digits){var o=e.digits.toString().split(",");isFinite(o[0])&&o[1]&&isFinite(o[1])?n+=a+t+"{"+e.digits+"}":(isNaN(e.digits)||parseInt(e.digits)>0)&&(e.digitsOptional?(i=n+a+t+"{0,"+e.digits+"}",e.keepStatic=!0):n+=a+t+"{"+e.digits+"}")}else e.inputmode="numeric";return n+=u(e.suffix,e),n+="[-]",i&&(n=[i+u(e.suffix,e)+"[-]",n]),e.greedy=!1,function(e){void 0===e.parseMinMaxOptions&&(null!==e.min&&(e.min=e.min.toString().replace(new RegExp((0,r.default)(e.groupSeparator),"g"),""),","===e.radixPoint&&(e.min=e.min.replace(e.radixPoint,".")),e.min=isFinite(e.min)?parseFloat(e.min):NaN,isNaN(e.min)&&(e.min=Number.MIN_VALUE)),null!==e.max&&(e.max=e.max.toString().replace(new RegExp((0,r.default)(e.groupSeparator),"g"),""),","===e.radixPoint&&(e.max=e.max.replace(e.radixPoint,".")),e.max=isFinite(e.max)?parseFloat(e.max):NaN,isNaN(e.max)&&(e.max=Number.MAX_VALUE)),e.parseMinMaxOptions="done")}(e),n},_mask:function(e){return"("+e.groupSeparator+"999){+|1}"},digits:"*",digitsOptional:!0,enforceDigitsOnBlur:!1,radixPoint:".",positionCaretOnClick:"radixFocus",_radixDance:!0,groupSeparator:"",allowMinus:!0,negationSymbol:{front:"-",back:""},prefix:"",suffix:"",min:null,max:null,SetMaxOnOverflow:!1,step:1,inputType:"text",unmaskAsNumber:!1,roundingFN:Math.round,inputmode:"decimal",shortcuts:{k:"000",m:"000000"},placeholder:"0",greedy:!1,rightAlign:!0,insertMode:!0,autoUnmask:!1,skipOptionalPartCharacter:"",usePrototypeDefinitions:!1,definitions:{0:{validator:p},1:{validator:p,definitionSymbol:"9"},9:{validator:"[0-9\uff10-\uff19\u0660-\u0669\u06f0-\u06f9]",definitionSymbol:"*"},"+":{validator:function(e,t,a,i,n){return n.allowMinus&&("-"===e||e===n.negationSymbol.front)}},"-":{validator:function(e,t,a,i,n){return n.allowMinus&&e===n.negationSymbol.back}}},preValidation:function(e,t,a,i,n,r,o,s){var l;if(!1!==n.__financeInput&&a===n.radixPoint)return!1;if(l=n.shortcuts&&n.shortcuts[a]){if(l.length>1)for(var u=[],c=0;c<l.length;c++)u.push({pos:t+c,c:l[c],strict:!1});return{insert:u}}var p=e.indexOf(n.radixPoint),h=t;if(t=function(e,t,a,i,n){return n._radixDance&&n.numericInput&&t!==n.negationSymbol.back&&e<=a&&(a>0||t==n.radixPoint)&&(void 0===i.validPositions[e-1]||i.validPositions[e-1].input!==n.negationSymbol.back)&&(e-=1),e}(t,a,p,r,n),"-"===a||a===n.negationSymbol.front){if(!0!==n.allowMinus)return!1;var m=!1,v=d("+",r),g=d("-",r);return-1!==v&&(m=[v,g]),!1!==m?{remove:m,caret:h-n.negationSymbol.back.length}:{insert:[{pos:f.call(this,"+",r),c:n.negationSymbol.front,fromIsValid:!0},{pos:f.call(this,"-",r),c:n.negationSymbol.back,fromIsValid:void 0}],caret:h+n.negationSymbol.back.length}}if(a===n.groupSeparator)return{caret:h};if(s)return!0;if(-1!==p&&!0===n._radixDance&&!1===i&&a===n.radixPoint&&void 0!==n.digits&&(isNaN(n.digits)||parseInt(n.digits)>0)&&p!==t)return{caret:n._radixDance&&t===p-1?p+1:p};if(!1===n.__financeInput)if(i){if(n.digitsOptional)return{rewritePosition:o.end};if(!n.digitsOptional){if(o.begin>p&&o.end<=p)return a===n.radixPoint?{insert:{pos:p+1,c:"0",fromIsValid:!0},rewritePosition:p}:{rewritePosition:p+1};if(o.begin<p)return{rewritePosition:o.begin-1}}}else if(!n.showMaskOnHover&&!n.showMaskOnFocus&&!n.digitsOptional&&n.digits>0&&""===this.__valueGet.call(this.el))return{rewritePosition:p};return{rewritePosition:t}},postValidation:function(e,t,a,i,n,r,o){if(!1===i)return i;if(o)return!0;if(null!==n.min||null!==n.max){var s=n.onUnMask(e.slice().reverse().join(""),void 0,l.extend({},n,{unmaskAsNumber:!0}));if(null!==n.min&&s<n.min&&(s.toString().length>n.min.toString().length||s<0))return!1;if(null!==n.max&&s>n.max)return!!n.SetMaxOnOverflow&&{refreshFromBuffer:!0,buffer:c(n.max.toString().replace(".",n.radixPoint).split(""),n.digits,n).reverse()}}return i},onUnMask:function(e,t,a){if(""===t&&!0===a.nullable)return t;var i=e.replace(a.prefix,"");return i=(i=i.replace(a.suffix,"")).replace(new RegExp((0,r.default)(a.groupSeparator),"g"),""),""!==a.placeholder.charAt(0)&&(i=i.replace(new RegExp(a.placeholder.charAt(0),"g"),"0")),a.unmaskAsNumber?(""!==a.radixPoint&&-1!==i.indexOf(a.radixPoint)&&(i=i.replace(r.default.call(this,a.radixPoint),".")),i=(i=i.replace(new RegExp("^"+(0,r.default)(a.negationSymbol.front)),"-")).replace(new RegExp((0,r.default)(a.negationSymbol.back)+"$"),""),Number(i)):i},isComplete:function(e,t){var a=(t.numericInput?e.slice().reverse():e).join("");return a=(a=(a=(a=(a=a.replace(new RegExp("^"+(0,r.default)(t.negationSymbol.front)),"-")).replace(new RegExp((0,r.default)(t.negationSymbol.back)+"$"),"")).replace(t.prefix,"")).replace(t.suffix,"")).replace(new RegExp((0,r.default)(t.groupSeparator)+"([0-9]{3})","g"),"$1"),","===t.radixPoint&&(a=a.replace((0,r.default)(t.radixPoint),".")),isFinite(a)},onBeforeMask:function(e,t){var a=t.radixPoint||",";isFinite(t.digits)&&(t.digits=parseInt(t.digits)),"number"!=typeof e&&"number"!==t.inputType||""===a||(e=e.toString().replace(".",a));var i="-"===e.charAt(0)||e.charAt(0)===t.negationSymbol.front,n=e.split(a),o=n[0].replace(/[^\-0-9]/g,""),s=n.length>1?n[1].replace(/[^0-9]/g,""):"",l=n.length>1;e=o+(""!==s?a+s:s);var u=0;if(""!==a&&(u=t.digitsOptional?t.digits<s.length?t.digits:s.length:t.digits,""!==s||!t.digitsOptional)){var f=Math.pow(10,u||1);e=e.replace((0,r.default)(a),"."),isNaN(parseFloat(e))||(e=(t.roundingFN(parseFloat(e)*f)/f).toFixed(u)),e=e.toString().replace(".",a)}if(0===t.digits&&-1!==e.indexOf(a)&&(e=e.substring(0,e.indexOf(a))),null!==t.min||null!==t.max){var d=e.toString().replace(a,".");null!==t.min&&d<t.min?e=t.min.toString().replace(".",a):null!==t.max&&d>t.max&&(e=t.max.toString().replace(".",a))}return i&&"-"!==e.charAt(0)&&(e="-"+e),c(e.toString().split(""),u,t,l).join("")},onBeforeWrite:function(e,t,a,i){function n(e,t){if(!1!==i.__financeInput||t){var a=e.indexOf(i.radixPoint);-1!==a&&e.splice(a,1)}if(""!==i.groupSeparator)for(;-1!==(a=e.indexOf(i.groupSeparator));)e.splice(a,1);return e}var o,s=function(e,t){var a=new RegExp("(^"+(""!==t.negationSymbol.front?(0,r.default)(t.negationSymbol.front)+"?":"")+(0,r.default)(t.prefix)+")(.*)("+(0,r.default)(t.suffix)+(""!=t.negationSymbol.back?(0,r.default)(t.negationSymbol.back)+"?":"")+"$)").exec(e.slice().reverse().join("")),i=a?a[2]:"",n=!1;return i&&(i=i.split(t.radixPoint.charAt(0))[0],n=new RegExp("^[0"+t.groupSeparator+"]*").exec(i)),!(!n||!(n[0].length>1||n[0].length>0&&n[0].length<i.length))&&n}(t,i);if(s)for(var u=t.join("").lastIndexOf(s[0].split("").reverse().join(""))-(s[0]==s.input?0:1),f=s[0]==s.input?1:0,d=s[0].length-f;d>0;d--)delete this.maskset.validPositions[u+d],delete t[u+d];if(e)switch(e.type){case"blur":case"checkval":if(null!==i.min){var p=i.onUnMask(t.slice().reverse().join(""),void 0,l.extend({},i,{unmaskAsNumber:!0}));if(null!==i.min&&p<i.min)return{refreshFromBuffer:!0,buffer:c(i.min.toString().replace(".",i.radixPoint).split(""),i.digits,i).reverse()}}if(t[t.length-1]===i.negationSymbol.front){var h=new RegExp("(^"+(""!=i.negationSymbol.front?(0,r.default)(i.negationSymbol.front)+"?":"")+(0,r.default)(i.prefix)+")(.*)("+(0,r.default)(i.suffix)+(""!=i.negationSymbol.back?(0,r.default)(i.negationSymbol.back)+"?":"")+"$)").exec(n(t.slice(),!0).reverse().join(""));0==(h?h[2]:"")&&(o={refreshFromBuffer:!0,buffer:[0]})}else""!==i.radixPoint&&t[0]===i.radixPoint&&(o&&o.buffer?o.buffer.shift():(t.shift(),o={refreshFromBuffer:!0,buffer:n(t)}));if(i.enforceDigitsOnBlur){var m=(o=o||{})&&o.buffer||t.slice().reverse();o.refreshFromBuffer=!0,o.buffer=c(m,i.digits,i,!0).reverse()}}return o},onKeyDown:function(e,t,a,i){var r,o=l(this);if(e.ctrlKey)switch(e.keyCode){case n.default.UP:return this.inputmask.__valueSet.call(this,parseFloat(this.inputmask.unmaskedvalue())+parseInt(i.step)),o.trigger("setvalue"),!1;case n.default.DOWN:return this.inputmask.__valueSet.call(this,parseFloat(this.inputmask.unmaskedvalue())-parseInt(i.step)),o.trigger("setvalue"),!1}if(!e.shiftKey&&(e.keyCode===n.default.DELETE||e.keyCode===n.default.BACKSPACE||e.keyCode===n.default.BACKSPACE_SAFARI)&&a.begin!==t.length){if(t[e.keyCode===n.default.DELETE?a.begin-1:a.end]===i.negationSymbol.front)return r=t.slice().reverse(),""!==i.negationSymbol.front&&r.shift(),""!==i.negationSymbol.back&&r.pop(),o.trigger("setvalue",[r.join(""),a.begin]),!1;if(!0===i._radixDance){var s=t.indexOf(i.radixPoint);if(i.digitsOptional){if(0===s)return(r=t.slice().reverse()).pop(),o.trigger("setvalue",[r.join(""),a.begin>=r.length?r.length:a.begin]),!1}else if(-1!==s&&(a.begin<s||a.end<s||e.keyCode===n.default.DELETE&&a.begin===s))return a.begin!==a.end||e.keyCode!==n.default.BACKSPACE&&e.keyCode!==n.default.BACKSPACE_SAFARI||a.begin++,(r=t.slice().reverse()).splice(r.length-a.begin,a.begin-a.end+1),r=c(r,i.digits,i).join(""),o.trigger("setvalue",[r,a.begin>=r.length?s+1:a.begin]),!1}}}},currency:{prefix:"",groupSeparator:",",alias:"numeric",digits:2,digitsOptional:!1},decimal:{alias:"numeric"},integer:{alias:"numeric",inputmode:"numeric",digits:0},percentage:{alias:"numeric",min:0,max:100,suffix:" %",digits:0,allowMinus:!1},indianns:{alias:"numeric",_mask:function(e){return"("+e.groupSeparator+"99){*|1}("+e.groupSeparator+"999){1|1}"},groupSeparator:",",radixPoint:".",placeholder:"0",digits:2,digitsOptional:!1}})},9380:function(e,t,a){var i;Object.defineProperty(t,"__esModule",{value:!0}),t.default=void 0;var n=((i=a(8741))&&i.__esModule?i:{default:i}).default?window:{};t.default=n},7760:function(e,t,a){Object.defineProperty(t,"__esModule",{value:!0}),t.applyInputValue=c,t.clearOptionalTail=f,t.checkVal=d,t.HandleNativePlaceholder=function(e,t){var a=e?e.inputmask:this;if(l.ie){if(e.inputmask._valueGet()!==t&&(e.placeholder!==t||""===e.placeholder)){var i=o.getBuffer.call(a).slice(),n=e.inputmask._valueGet();if(n!==t){var r=o.getLastValidPosition.call(a);-1===r&&n===o.getBufferTemplate.call(a).join("")?i=[]:-1!==r&&f.call(a,i),p(e,i)}}}else e.placeholder!==t&&(e.placeholder=t,""===e.placeholder&&e.removeAttribute("placeholder"))},t.unmaskedvalue=function(e){var t=e?e.inputmask:this,a=t.opts,i=t.maskset;if(e){if(void 0===e.inputmask)return e.value;e.inputmask&&e.inputmask.refreshValue&&c(e,e.inputmask._valueGet(!0))}var n=[],r=i.validPositions;for(var s in r)r[s]&&r[s].match&&(1!=r[s].match.static||Array.isArray(i.metadata)&&!0!==r[s].generatedInput)&&n.push(r[s].input);var l=0===n.length?"":(t.isRTL?n.reverse():n).join("");if("function"==typeof a.onUnMask){var u=(t.isRTL?o.getBuffer.call(t).slice().reverse():o.getBuffer.call(t)).join("");l=a.onUnMask.call(t,u,l,a)}return l},t.writeBuffer=p;var i,n=(i=a(4528))&&i.__esModule?i:{default:i},r=a(4713),o=a(8711),s=a(7215),l=a(9845),u=a(6030);function c(e,t){var a=e?e.inputmask:this,i=a.opts;e.inputmask.refreshValue=!1,"function"==typeof i.onBeforeMask&&(t=i.onBeforeMask.call(a,t,i)||t),d(e,!0,!1,t=t.toString().split("")),a.undoValue=a._valueGet(!0),(i.clearMaskOnLostFocus||i.clearIncomplete)&&e.inputmask._valueGet()===o.getBufferTemplate.call(a).join("")&&-1===o.getLastValidPosition.call(a)&&e.inputmask._valueSet("")}function f(e){e.length=0;for(var t,a=r.getMaskTemplate.call(this,!0,0,!0,void 0,!0);void 0!==(t=a.shift());)e.push(t);return e}function d(e,t,a,i,n){var l=e?e.inputmask:this,c=l.maskset,f=l.opts,d=l.dependencyLib,h=i.slice(),m="",v=-1,g=void 0,k=f.skipOptionalPartCharacter;f.skipOptionalPartCharacter="",o.resetMaskSet.call(l),c.tests={},v=f.radixPoint?o.determineNewCaretPosition.call(l,{begin:0,end:0},!1,!1===f.__financeInput?"radixFocus":void 0).begin:0,c.p=v,l.caretPos={begin:v};var y=[],b=l.caretPos;if(h.forEach((function(e,t){if(void 0!==e){var i=new d.Event("_checkval");i.which=e.toString().charCodeAt(0),m+=e;var n=o.getLastValidPosition.call(l,void 0,!0);!function(e,t){for(var a=r.getMaskTemplate.call(l,!0,0).slice(e,o.seekNext.call(l,e,!1,!1)).join("").replace(/'/g,""),i=a.indexOf(t);i>0&&" "===a[i-1];)i--;var n=0===i&&!o.isMask.call(l,e)&&(r.getTest.call(l,e).match.nativeDef===t.charAt(0)||!0===r.getTest.call(l,e).match.static&&r.getTest.call(l,e).match.nativeDef==="'"+t.charAt(0)||" "===r.getTest.call(l,e).match.nativeDef&&(r.getTest.call(l,e+1).match.nativeDef===t.charAt(0)||!0===r.getTest.call(l,e+1).match.static&&r.getTest.call(l,e+1).match.nativeDef==="'"+t.charAt(0)));if(!n&&i>0&&!o.isMask.call(l,e,!1,!0)){var s=o.seekNext.call(l,e);l.caretPos.begin<s&&(l.caretPos={begin:s})}return n}(v,m)?(g=u.EventHandlers.keypressEvent.call(l,i,!0,!1,a,l.caretPos.begin))&&(v=l.caretPos.begin+1,m=""):g=u.EventHandlers.keypressEvent.call(l,i,!0,!1,a,n+1),g?(void 0!==g.pos&&c.validPositions[g.pos]&&!0===c.validPositions[g.pos].match.static&&void 0===c.validPositions[g.pos].alternation&&(y.push(g.pos),l.isRTL||(g.forwardPosition=g.pos+1)),p.call(l,void 0,o.getBuffer.call(l),g.forwardPosition,i,!1),l.caretPos={begin:g.forwardPosition,end:g.forwardPosition},b=l.caretPos):void 0===c.validPositions[t]&&h[t]===r.getPlaceholder.call(l,t)&&o.isMask.call(l,t,!0)?l.caretPos.begin++:l.caretPos=b}})),y.length>0){var x,P,E=o.seekNext.call(l,-1,void 0,!1);if(!s.isComplete.call(l,o.getBuffer.call(l))&&y.length<=E||s.isComplete.call(l,o.getBuffer.call(l))&&y.length>0&&y.length!==E&&0===y[0])for(var S=E;void 0!==(x=y.shift());){var _=new d.Event("_checkval");if((P=c.validPositions[x]).generatedInput=!0,_.which=P.input.charCodeAt(0),(g=u.EventHandlers.keypressEvent.call(l,_,!0,!1,a,S))&&void 0!==g.pos&&g.pos!==x&&c.validPositions[g.pos]&&!0===c.validPositions[g.pos].match.static)y.push(g.pos);else if(!g)break;S++}}t&&p.call(l,e,o.getBuffer.call(l),g?g.forwardPosition:l.caretPos.begin,n||new d.Event("checkval"),n&&"input"===n.type&&l.undoValue!==l._valueGet(!0)),f.skipOptionalPartCharacter=k}function p(e,t,a,i,r){var l=e?e.inputmask:this,u=l.opts,c=l.dependencyLib;if(i&&"function"==typeof u.onBeforeWrite){var f=u.onBeforeWrite.call(l,i,t,a,u);if(f){if(f.refreshFromBuffer){var d=f.refreshFromBuffer;s.refreshFromBuffer.call(l,!0===d?d:d.start,d.end,f.buffer||t),t=o.getBuffer.call(l,!0)}void 0!==a&&(a=void 0!==f.caret?f.caret:a)}}if(void 0!==e&&(e.inputmask._valueSet(t.join("")),void 0===a||void 0!==i&&"blur"===i.type||o.caret.call(l,e,a,void 0,void 0,void 0!==i&&"keydown"===i.type&&(i.keyCode===n.default.DELETE||i.keyCode===n.default.BACKSPACE)),!0===r)){var p=c(e),h=e.inputmask._valueGet();e.inputmask.skipInputEvent=!0,p.trigger("input"),setTimeout((function(){h===o.getBufferTemplate.call(l).join("")?p.trigger("cleared"):!0===s.isComplete.call(l,t)&&p.trigger("complete")}),0)}}},2394:function(e,t,a){Object.defineProperty(t,"__esModule",{value:!0}),t.default=void 0,a(7149),a(3194);var i=a(157),n=v(a(3287)),r=v(a(9380)),o=a(2391),s=a(4713),l=a(8711),u=a(7215),c=a(7760),f=a(9716),d=v(a(7392)),p=v(a(3976)),h=v(a(8741));function m(e){return(m="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function v(e){return e&&e.__esModule?e:{default:e}}var g=r.default.document,k="_inputmask_opts";function y(e,t,a){if(h.default){if(!(this instanceof y))return new y(e,t,a);this.dependencyLib=n.default,this.el=void 0,this.events={},this.maskset=void 0,!0!==a&&("[object Object]"===Object.prototype.toString.call(e)?t=e:(t=t||{},e&&(t.alias=e)),this.opts=n.default.extend(!0,{},this.defaults,t),this.noMasksCache=t&&void 0!==t.definitions,this.userOptions=t||{},b(this.opts.alias,t,this.opts)),this.refreshValue=!1,this.undoValue=void 0,this.$el=void 0,this.skipKeyPressEvent=!1,this.skipInputEvent=!1,this.validationEvent=!1,this.ignorable=!1,this.maxLength,this.mouseEnter=!1,this.originalPlaceholder=void 0,this.isComposing=!1}}function b(e,t,a){var i=y.prototype.aliases[e];return i?(i.alias&&b(i.alias,void 0,a),n.default.extend(!0,a,i),n.default.extend(!0,a,t),!0):(null===a.mask&&(a.mask=e),!1)}y.prototype={dataAttribute:"data-inputmask",defaults:p.default,definitions:d.default,aliases:{},masksCache:{},get isRTL(){return this.opts.isRTL||this.opts.numericInput},mask:function(e){var t=this;return"string"==typeof e&&(e=g.getElementById(e)||g.querySelectorAll(e)),(e=e.nodeName?[e]:Array.isArray(e)?e:Array.from(e)).forEach((function(e,a){var s=n.default.extend(!0,{},t.opts);if(function(e,t,a,i){function o(t,n){var o=""===i?t:i+"-"+t;null!==(n=void 0!==n?n:e.getAttribute(o))&&("string"==typeof n&&(0===t.indexOf("on")?n=r.default[n]:"false"===n?n=!1:"true"===n&&(n=!0)),a[t]=n)}if(!0===t.importDataAttributes){var s,l,u,c,f=e.getAttribute(i);if(f&&""!==f&&(f=f.replace(/'/g,'"'),l=JSON.parse("{"+f+"}")),l)for(c in u=void 0,l)if("alias"===c.toLowerCase()){u=l[c];break}for(s in o("alias",u),a.alias&&b(a.alias,a,t),t){if(l)for(c in u=void 0,l)if(c.toLowerCase()===s.toLowerCase()){u=l[c];break}o(s,u)}}n.default.extend(!0,t,a),("rtl"===e.dir||t.rightAlign)&&(e.style.textAlign="right");("rtl"===e.dir||t.numericInput)&&(e.dir="ltr",e.removeAttribute("dir"),t.isRTL=!0);return Object.keys(a).length}(e,s,n.default.extend(!0,{},t.userOptions),t.dataAttribute)){var l=(0,o.generateMaskSet)(s,t.noMasksCache);void 0!==l&&(void 0!==e.inputmask&&(e.inputmask.opts.autoUnmask=!0,e.inputmask.remove()),e.inputmask=new y(void 0,void 0,!0),e.inputmask.opts=s,e.inputmask.noMasksCache=t.noMasksCache,e.inputmask.userOptions=n.default.extend(!0,{},t.userOptions),e.inputmask.el=e,e.inputmask.$el=(0,n.default)(e),e.inputmask.maskset=l,n.default.data(e,k,t.userOptions),i.mask.call(e.inputmask))}})),e&&e[0]&&e[0].inputmask||this},option:function(e,t){return"string"==typeof e?this.opts[e]:"object"===m(e)?(n.default.extend(this.userOptions,e),this.el&&!0!==t&&this.mask(this.el),this):void 0},unmaskedvalue:function(e){if(this.maskset=this.maskset||(0,o.generateMaskSet)(this.opts,this.noMasksCache),void 0===this.el||void 0!==e){var t=("function"==typeof this.opts.onBeforeMask&&this.opts.onBeforeMask.call(this,e,this.opts)||e).split("");c.checkVal.call(this,void 0,!1,!1,t),"function"==typeof this.opts.onBeforeWrite&&this.opts.onBeforeWrite.call(this,void 0,l.getBuffer.call(this),0,this.opts)}return c.unmaskedvalue.call(this,this.el)},remove:function(){if(this.el){n.default.data(this.el,k,null);var e=this.opts.autoUnmask?(0,c.unmaskedvalue)(this.el):this._valueGet(this.opts.autoUnmask);e!==l.getBufferTemplate.call(this).join("")?this._valueSet(e,this.opts.autoUnmask):this._valueSet(""),f.EventRuler.off(this.el),Object.getOwnPropertyDescriptor&&Object.getPrototypeOf?Object.getOwnPropertyDescriptor(Object.getPrototypeOf(this.el),"value")&&this.__valueGet&&Object.defineProperty(this.el,"value",{get:this.__valueGet,set:this.__valueSet,configurable:!0}):g.__lookupGetter__&&this.el.__lookupGetter__("value")&&this.__valueGet&&(this.el.__defineGetter__("value",this.__valueGet),this.el.__defineSetter__("value",this.__valueSet)),this.el.inputmask=void 0}return this.el},getemptymask:function(){return this.maskset=this.maskset||(0,o.generateMaskSet)(this.opts,this.noMasksCache),l.getBufferTemplate.call(this).join("")},hasMaskedValue:function(){return!this.opts.autoUnmask},isComplete:function(){return this.maskset=this.maskset||(0,o.generateMaskSet)(this.opts,this.noMasksCache),u.isComplete.call(this,l.getBuffer.call(this))},getmetadata:function(){if(this.maskset=this.maskset||(0,o.generateMaskSet)(this.opts,this.noMasksCache),Array.isArray(this.maskset.metadata)){var e=s.getMaskTemplate.call(this,!0,0,!1).join("");return this.maskset.metadata.forEach((function(t){return t.mask!==e||(e=t,!1)})),e}return this.maskset.metadata},isValid:function(e){if(this.maskset=this.maskset||(0,o.generateMaskSet)(this.opts,this.noMasksCache),e){var t=("function"==typeof this.opts.onBeforeMask&&this.opts.onBeforeMask.call(this,e,this.opts)||e).split("");c.checkVal.call(this,void 0,!0,!1,t)}else e=this.isRTL?l.getBuffer.call(this).slice().reverse().join(""):l.getBuffer.call(this).join("");for(var a=l.getBuffer.call(this),i=l.determineLastRequiredPosition.call(this),n=a.length-1;n>i&&!l.isMask.call(this,n);n--);return a.splice(i,n+1-i),u.isComplete.call(this,a)&&e===(this.isRTL?l.getBuffer.call(this).slice().reverse().join(""):l.getBuffer.call(this).join(""))},format:function(e,t){this.maskset=this.maskset||(0,o.generateMaskSet)(this.opts,this.noMasksCache);var a=("function"==typeof this.opts.onBeforeMask&&this.opts.onBeforeMask.call(this,e,this.opts)||e).split("");c.checkVal.call(this,void 0,!0,!1,a);var i=this.isRTL?l.getBuffer.call(this).slice().reverse().join(""):l.getBuffer.call(this).join("");return t?{value:i,metadata:this.getmetadata()}:i},setValue:function(e){this.el&&(0,n.default)(this.el).trigger("setvalue",[e])},analyseMask:o.analyseMask},y.extendDefaults=function(e){n.default.extend(!0,y.prototype.defaults,e)},y.extendDefinitions=function(e){n.default.extend(!0,y.prototype.definitions,e)},y.extendAliases=function(e){n.default.extend(!0,y.prototype.aliases,e)},y.format=function(e,t,a){return y(t).format(e,a)},y.unmask=function(e,t){return y(t).unmaskedvalue(e)},y.isValid=function(e,t){return y(t).isValid(e)},y.remove=function(e){"string"==typeof e&&(e=g.getElementById(e)||g.querySelectorAll(e)),(e=e.nodeName?[e]:e).forEach((function(e){e.inputmask&&e.inputmask.remove()}))},y.setValue=function(e,t){"string"==typeof e&&(e=g.getElementById(e)||g.querySelectorAll(e)),(e=e.nodeName?[e]:e).forEach((function(e){e.inputmask?e.inputmask.setValue(t):(0,n.default)(e).trigger("setvalue",[t])}))},y.dependencyLib=n.default,r.default.Inputmask=y;var x=y;t.default=x},5296:function(e,t,a){function i(e){return(i="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}var n=p(a(9380)),r=p(a(2394)),o=p(a(8741));function s(e,t){return!t||"object"!==i(t)&&"function"!=typeof t?function(e){if(void 0===e)throw new ReferenceError("this hasn't been initialised - super() hasn't been called");return e}(e):t}function l(e){var t="function"==typeof Map?new Map:void 0;return(l=function(e){if(null===e||(a=e,-1===Function.toString.call(a).indexOf("[native code]")))return e;var a;if("function"!=typeof e)throw new TypeError("Super expression must either be null or a function");if(void 0!==t){if(t.has(e))return t.get(e);t.set(e,i)}function i(){return u(e,arguments,d(this).constructor)}return i.prototype=Object.create(e.prototype,{constructor:{value:i,enumerable:!1,writable:!0,configurable:!0}}),f(i,e)})(e)}function u(e,t,a){return(u=c()?Reflect.construct:function(e,t,a){var i=[null];i.push.apply(i,t);var n=new(Function.bind.apply(e,i));return a&&f(n,a.prototype),n}).apply(null,arguments)}function c(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],(function(){}))),!0}catch(e){return!1}}function f(e,t){return(f=Object.setPrototypeOf||function(e,t){return e.__proto__=t,e})(e,t)}function d(e){return(d=Object.setPrototypeOf?Object.getPrototypeOf:function(e){return e.__proto__||Object.getPrototypeOf(e)})(e)}function p(e){return e&&e.__esModule?e:{default:e}}var h=n.default.document;if(o.default&&h&&h.head&&h.head.attachShadow&&n.default.customElements&&void 0===n.default.customElements.get("input-mask")){var m=function(e){!function(e,t){if("function"!=typeof t&&null!==t)throw new TypeError("Super expression must either be null or a function");e.prototype=Object.create(t&&t.prototype,{constructor:{value:e,writable:!0,configurable:!0}}),t&&f(e,t)}(n,e);var t,a,i=(t=n,a=c(),function(){var e,i=d(t);if(a){var n=d(this).constructor;e=Reflect.construct(i,arguments,n)}else e=i.apply(this,arguments);return s(this,e)});function n(){var e;!function(e,t){if(!(e instanceof t))throw new TypeError("Cannot call a class as a function")}(this,n);var t=(e=i.call(this)).getAttributeNames(),a=e.attachShadow({mode:"closed"}),o=h.createElement("input");for(var s in o.type="text",a.appendChild(o),t)Object.prototype.hasOwnProperty.call(t,s)&&o.setAttribute(t[s],e.getAttribute(t[s]));var l=new r.default;return l.dataAttribute="",l.mask(o),o.inputmask.shadowRoot=a,e}return n}(l(HTMLElement));n.default.customElements.define("input-mask",m)}},443:function(e,t,a){var i=o(a(8254)),n=o(a(2394));function r(e){return(r="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function o(e){return e&&e.__esModule?e:{default:e}}void 0===i.default.fn.inputmask&&(i.default.fn.inputmask=function(e,t){var a,o=this[0];if(void 0===t&&(t={}),"string"==typeof e)switch(e){case"unmaskedvalue":return o&&o.inputmask?o.inputmask.unmaskedvalue():(0,i.default)(o).val();case"remove":return this.each((function(){this.inputmask&&this.inputmask.remove()}));case"getemptymask":return o&&o.inputmask?o.inputmask.getemptymask():"";case"hasMaskedValue":return!(!o||!o.inputmask)&&o.inputmask.hasMaskedValue();case"isComplete":return!o||!o.inputmask||o.inputmask.isComplete();case"getmetadata":return o&&o.inputmask?o.inputmask.getmetadata():void 0;case"setvalue":n.default.setValue(o,t);break;case"option":if("string"!=typeof t)return this.each((function(){if(void 0!==this.inputmask)return this.inputmask.option(t)}));if(o&&void 0!==o.inputmask)return o.inputmask.option(t);break;default:return t.alias=e,a=new n.default(t),this.each((function(){a.mask(this)}))}else{if(Array.isArray(e))return t.alias=e,a=new n.default(t),this.each((function(){a.mask(this)}));if("object"==r(e))return a=new n.default(e),void 0===e.mask&&void 0===e.alias?this.each((function(){if(void 0!==this.inputmask)return this.inputmask.option(e);a.mask(this)})):this.each((function(){a.mask(this)}));if(void 0===e)return this.each((function(){(a=new n.default(t)).mask(this)}))}})},2391:function(e,t,a){Object.defineProperty(t,"__esModule",{value:!0}),t.generateMaskSet=function(e,t){var a;function n(e,a,n){var r,o,s=!1;if(null!==e&&""!==e||((s=null!==n.regex)?e=(e=n.regex).replace(/^(\^)(.*)(\$)$/,"$2"):(s=!0,e=".*")),1===e.length&&!1===n.greedy&&0!==n.repeat&&(n.placeholder=""),n.repeat>0||"*"===n.repeat||"+"===n.repeat){var l="*"===n.repeat?0:"+"===n.repeat?1:n.repeat;e=n.groupmarker[0]+e+n.groupmarker[1]+n.quantifiermarker[0]+l+","+n.repeat+n.quantifiermarker[1]}return o=s?"regex_"+n.regex:n.numericInput?e.split("").reverse().join(""):e,!1!==n.keepStatic&&(o="ks_"+o),void 0===Inputmask.prototype.masksCache[o]||!0===t?(r={mask:e,maskToken:Inputmask.prototype.analyseMask(e,s,n),validPositions:{},_buffer:void 0,buffer:void 0,tests:{},excludes:{},metadata:a,maskLength:void 0,jitOffset:{}},!0!==t&&(Inputmask.prototype.masksCache[o]=r,r=i.default.extend(!0,{},Inputmask.prototype.masksCache[o]))):r=i.default.extend(!0,{},Inputmask.prototype.masksCache[o]),r}"function"==typeof e.mask&&(e.mask=e.mask(e));if(Array.isArray(e.mask)){if(e.mask.length>1){null===e.keepStatic&&(e.keepStatic=!0);var r=e.groupmarker[0];return(e.isRTL?e.mask.reverse():e.mask).forEach((function(t){r.length>1&&(r+=e.groupmarker[1]+e.alternatormarker+e.groupmarker[0]),void 0!==t.mask&&"function"!=typeof t.mask?r+=t.mask:r+=t})),n(r+=e.groupmarker[1],e.mask,e)}e.mask=e.mask.pop()}null===e.keepStatic&&(e.keepStatic=!1);a=e.mask&&void 0!==e.mask.mask&&"function"!=typeof e.mask.mask?n(e.mask.mask,e.mask,e):n(e.mask,e.mask,e);return a},t.analyseMask=function(e,t,a){var i,r,o,s,l,u,c=/(?:[?*+]|\{[0-9+*]+(?:,[0-9+*]*)?(?:\|[0-9+*]*)?\})|[^.?*+^${[]()|\\]+|./g,f=/\[\^?]?(?:[^\\\]]+|\\[\S\s]?)*]?|\\(?:0(?:[0-3][0-7]{0,2}|[4-7][0-7]?)?|[1-9][0-9]*|x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4}|c[A-Za-z]|[\S\s]?)|\((?:\?[:=!]?)?|(?:[?*+]|\{[0-9]+(?:,[0-9]*)?\})\??|[^.?*+^${[()|\\]+|./g,d=!1,p=new n.default,h=[],m=[],v=!1;function g(e,i,n){n=void 0!==n?n:e.matches.length;var r=e.matches[n-1];if(t)0===i.indexOf("[")||d&&/\\d|\\s|\\w]/i.test(i)||"."===i?e.matches.splice(n++,0,{fn:new RegExp(i,a.casing?"i":""),static:!1,optionality:!1,newBlockMarker:void 0===r?"master":r.def!==i,casing:null,def:i,placeholder:void 0,nativeDef:i}):(d&&(i=i[i.length-1]),i.split("").forEach((function(t,i){r=e.matches[n-1],e.matches.splice(n++,0,{fn:/[a-z]/i.test(a.staticDefinitionSymbol||t)?new RegExp("["+(a.staticDefinitionSymbol||t)+"]",a.casing?"i":""):null,static:!0,optionality:!1,newBlockMarker:void 0===r?"master":r.def!==t&&!0!==r.static,casing:null,def:a.staticDefinitionSymbol||t,placeholder:void 0!==a.staticDefinitionSymbol?t:void 0,nativeDef:(d?"'":"")+t})}))),d=!1;else{var o=a.definitions&&a.definitions[i]||a.usePrototypeDefinitions&&Inputmask.prototype.definitions[i];o&&!d?e.matches.splice(n++,0,{fn:o.validator?"string"==typeof o.validator?new RegExp(o.validator,a.casing?"i":""):new function(){this.test=o.validator}:new RegExp("."),static:o.static||!1,optionality:!1,newBlockMarker:void 0===r?"master":r.def!==(o.definitionSymbol||i),casing:o.casing,def:o.definitionSymbol||i,placeholder:o.placeholder,nativeDef:i,generated:o.generated}):(e.matches.splice(n++,0,{fn:/[a-z]/i.test(a.staticDefinitionSymbol||i)?new RegExp("["+(a.staticDefinitionSymbol||i)+"]",a.casing?"i":""):null,static:!0,optionality:!1,newBlockMarker:void 0===r?"master":r.def!==i&&!0!==r.static,casing:null,def:a.staticDefinitionSymbol||i,placeholder:void 0!==a.staticDefinitionSymbol?i:void 0,nativeDef:(d?"'":"")+i}),d=!1)}}function k(){if(h.length>0){if(g(s=h[h.length-1],r),s.isAlternator){l=h.pop();for(var e=0;e<l.matches.length;e++)l.matches[e].isGroup&&(l.matches[e].isGroup=!1);h.length>0?(s=h[h.length-1]).matches.push(l):p.matches.push(l)}}else g(p,r)}function y(e){var t=new n.default(!0);return t.openGroup=!1,t.matches=e,t}function b(){if((o=h.pop()).openGroup=!1,void 0!==o)if(h.length>0){if((s=h[h.length-1]).matches.push(o),s.isAlternator){l=h.pop();for(var e=0;e<l.matches.length;e++)l.matches[e].isGroup=!1,l.matches[e].alternatorGroup=!1;h.length>0?(s=h[h.length-1]).matches.push(l):p.matches.push(l)}}else p.matches.push(o);else k()}function x(e){var t=e.pop();return t.isQuantifier&&(t=y([e.pop(),t])),t}t&&(a.optionalmarker[0]=void 0,a.optionalmarker[1]=void 0);for(;i=t?f.exec(e):c.exec(e);){if(r=i[0],t)switch(r.charAt(0)){case"?":r="{0,1}";break;case"+":case"*":r="{"+r+"}";break;case"|":if(0===h.length){var P=y(p.matches);P.openGroup=!0,h.push(P),p.matches=[],v=!0}}if(d)k();else switch(r.charAt(0)){case"$":case"^":t||k();break;case"(?=":case"(?!":case"(?<=":case"(?<!":h.push(new n.default(!0));break;case a.escapeChar:d=!0,t&&k();break;case a.optionalmarker[1]:case a.groupmarker[1]:b();break;case a.optionalmarker[0]:h.push(new n.default(!1,!0));break;case a.groupmarker[0]:h.push(new n.default(!0));break;case a.quantifiermarker[0]:var E=new n.default(!1,!1,!0),S=(r=r.replace(/[{}]/g,"")).split("|"),_=S[0].split(","),M=isNaN(_[0])?_[0]:parseInt(_[0]),w=1===_.length?M:isNaN(_[1])?_[1]:parseInt(_[1]);"*"!==M&&"+"!==M||(M="*"===w?0:1),E.quantifier={min:M,max:w,jit:S[1]};var O=h.length>0?h[h.length-1].matches:p.matches;if((i=O.pop()).isAlternator){O.push(i),O=i.matches;var T=new n.default(!0),A=O.pop();O.push(T),O=T.matches,i=A}i.isGroup||(i=y([i])),O.push(i),O.push(E);break;case a.alternatormarker:if(h.length>0){var C=(s=h[h.length-1]).matches[s.matches.length-1];u=s.openGroup&&(void 0===C.matches||!1===C.isGroup&&!1===C.isAlternator)?h.pop():x(s.matches)}else u=x(p.matches);if(u.isAlternator)h.push(u);else if(u.alternatorGroup?(l=h.pop(),u.alternatorGroup=!1):l=new n.default(!1,!1,!1,!0),l.matches.push(u),h.push(l),u.openGroup){u.openGroup=!1;var D=new n.default(!0);D.alternatorGroup=!0,h.push(D)}break;default:k()}}v&&b();for(;h.length>0;)o=h.pop(),p.matches.push(o);p.matches.length>0&&(!function e(i){i&&i.matches&&i.matches.forEach((function(n,r){var o=i.matches[r+1];(void 0===o||void 0===o.matches||!1===o.isQuantifier)&&n&&n.isGroup&&(n.isGroup=!1,t||(g(n,a.groupmarker[0],0),!0!==n.openGroup&&g(n,a.groupmarker[1]))),e(n)}))}(p),m.push(p));(a.numericInput||a.isRTL)&&function e(t){for(var i in t.matches=t.matches.reverse(),t.matches)if(Object.prototype.hasOwnProperty.call(t.matches,i)){var n=parseInt(i);if(t.matches[i].isQuantifier&&t.matches[n+1]&&t.matches[n+1].isGroup){var r=t.matches[i];t.matches.splice(i,1),t.matches.splice(n+1,0,r)}void 0!==t.matches[i].matches?t.matches[i]=e(t.matches[i]):t.matches[i]=((o=t.matches[i])===a.optionalmarker[0]?o=a.optionalmarker[1]:o===a.optionalmarker[1]?o=a.optionalmarker[0]:o===a.groupmarker[0]?o=a.groupmarker[1]:o===a.groupmarker[1]&&(o=a.groupmarker[0]),o)}var o;return t}(m[0]);return m};var i=r(a(3287)),n=r(a(9695));function r(e){return e&&e.__esModule?e:{default:e}}},157:function(e,t,a){Object.defineProperty(t,"__esModule",{value:!0}),t.mask=function(){var e=this,t=this.opts,a=this.el,i=this.dependencyLib;s.EventRuler.off(a);var f=function(t,a){"textarea"!==t.tagName.toLowerCase()&&a.ignorables.push(n.default.ENTER);var l=t.getAttribute("type"),u="input"===t.tagName.toLowerCase()&&a.supportsInputType.includes(l)||t.isContentEditable||"textarea"===t.tagName.toLowerCase();if(!u)if("input"===t.tagName.toLowerCase()){var c=document.createElement("input");c.setAttribute("type",l),u="text"===c.type,c=null}else u="partial";return!1!==u?function(t){var n,l;function u(){return this.inputmask?this.inputmask.opts.autoUnmask?this.inputmask.unmaskedvalue():-1!==r.getLastValidPosition.call(e)||!0!==a.nullable?(this.inputmask.shadowRoot||this.ownerDocument).activeElement===this&&a.clearMaskOnLostFocus?(e.isRTL?o.clearOptionalTail.call(e,r.getBuffer.call(e).slice()).reverse():o.clearOptionalTail.call(e,r.getBuffer.call(e).slice())).join(""):n.call(this):"":n.call(this)}function c(e){l.call(this,e),this.inputmask&&(0,o.applyInputValue)(this,e)}if(!t.inputmask.__valueGet){if(!0!==a.noValuePatching){if(Object.getOwnPropertyDescriptor){var f=Object.getPrototypeOf?Object.getOwnPropertyDescriptor(Object.getPrototypeOf(t),"value"):void 0;f&&f.get&&f.set?(n=f.get,l=f.set,Object.defineProperty(t,"value",{get:u,set:c,configurable:!0})):"input"!==t.tagName.toLowerCase()&&(n=function(){return this.textContent},l=function(e){this.textContent=e},Object.defineProperty(t,"value",{get:u,set:c,configurable:!0}))}else document.__lookupGetter__&&t.__lookupGetter__("value")&&(n=t.__lookupGetter__("value"),l=t.__lookupSetter__("value"),t.__defineGetter__("value",u),t.__defineSetter__("value",c));t.inputmask.__valueGet=n,t.inputmask.__valueSet=l}t.inputmask._valueGet=function(t){return e.isRTL&&!0!==t?n.call(this.el).split("").reverse().join(""):n.call(this.el)},t.inputmask._valueSet=function(t,a){l.call(this.el,null==t?"":!0!==a&&e.isRTL?t.split("").reverse().join(""):t)},void 0===n&&(n=function(){return this.value},l=function(e){this.value=e},function(t){if(i.valHooks&&(void 0===i.valHooks[t]||!0!==i.valHooks[t].inputmaskpatch)){var n=i.valHooks[t]&&i.valHooks[t].get?i.valHooks[t].get:function(e){return e.value},s=i.valHooks[t]&&i.valHooks[t].set?i.valHooks[t].set:function(e,t){return e.value=t,e};i.valHooks[t]={get:function(t){if(t.inputmask){if(t.inputmask.opts.autoUnmask)return t.inputmask.unmaskedvalue();var i=n(t);return-1!==r.getLastValidPosition.call(e,void 0,void 0,t.inputmask.maskset.validPositions)||!0!==a.nullable?i:""}return n(t)},set:function(e,t){var a=s(e,t);return e.inputmask&&(0,o.applyInputValue)(e,t),a},inputmaskpatch:!0}}}(t.type),function(t){s.EventRuler.on(t,"mouseenter",(function(){var t=this.inputmask._valueGet(!0);t!==(e.isRTL?r.getBuffer.call(e).reverse():r.getBuffer.call(e)).join("")&&(0,o.applyInputValue)(this,t)}))}(t))}}(t):t.inputmask=void 0,u}(a,t);if(!1!==f){e.originalPlaceholder=a.placeholder,e.maxLength=void 0!==a?a.maxLength:void 0,-1===e.maxLength&&(e.maxLength=void 0),"inputMode"in a&&null===a.getAttribute("inputmode")&&(a.inputMode=t.inputmode,a.setAttribute("inputmode",t.inputmode)),!0===f&&(t.showMaskOnFocus=t.showMaskOnFocus&&-1===["cc-number","cc-exp"].indexOf(a.autocomplete),l.iphone&&(t.insertModeVisual=!1),s.EventRuler.on(a,"submit",c.EventHandlers.submitEvent),s.EventRuler.on(a,"reset",c.EventHandlers.resetEvent),s.EventRuler.on(a,"blur",c.EventHandlers.blurEvent),s.EventRuler.on(a,"focus",c.EventHandlers.focusEvent),s.EventRuler.on(a,"invalid",c.EventHandlers.invalidEvent),s.EventRuler.on(a,"click",c.EventHandlers.clickEvent),s.EventRuler.on(a,"mouseleave",c.EventHandlers.mouseleaveEvent),s.EventRuler.on(a,"mouseenter",c.EventHandlers.mouseenterEvent),s.EventRuler.on(a,"paste",c.EventHandlers.pasteEvent),s.EventRuler.on(a,"cut",c.EventHandlers.cutEvent),s.EventRuler.on(a,"complete",t.oncomplete),s.EventRuler.on(a,"incomplete",t.onincomplete),s.EventRuler.on(a,"cleared",t.oncleared),!0!==t.inputEventOnly&&(s.EventRuler.on(a,"keydown",c.EventHandlers.keydownEvent),s.EventRuler.on(a,"keypress",c.EventHandlers.keypressEvent),s.EventRuler.on(a,"keyup",c.EventHandlers.keyupEvent)),(l.mobile||t.inputEventOnly)&&a.removeAttribute("maxLength"),s.EventRuler.on(a,"input",c.EventHandlers.inputFallBackEvent),s.EventRuler.on(a,"compositionend",c.EventHandlers.compositionendEvent)),s.EventRuler.on(a,"setvalue",c.EventHandlers.setValueEvent),r.getBufferTemplate.call(e).join(""),e.undoValue=e._valueGet(!0);var d=(a.inputmask.shadowRoot||a.ownerDocument).activeElement;if(""!==a.inputmask._valueGet(!0)||!1===t.clearMaskOnLostFocus||d===a){(0,o.applyInputValue)(a,a.inputmask._valueGet(!0),t);var p=r.getBuffer.call(e).slice();!1===u.isComplete.call(e,p)&&t.clearIncomplete&&r.resetMaskSet.call(e),t.clearMaskOnLostFocus&&d!==a&&(-1===r.getLastValidPosition.call(e)?p=[]:o.clearOptionalTail.call(e,p)),(!1===t.clearMaskOnLostFocus||t.showMaskOnFocus&&d===a||""!==a.inputmask._valueGet(!0))&&(0,o.writeBuffer)(a,p),d===a&&r.caret.call(e,a,r.seekNext.call(e,r.getLastValidPosition.call(e)))}}};var i,n=(i=a(4528))&&i.__esModule?i:{default:i},r=a(8711),o=a(7760),s=a(9716),l=a(9845),u=a(7215),c=a(6030)},9695:function(e,t){Object.defineProperty(t,"__esModule",{value:!0}),t.default=function(e,t,a,i){this.matches=[],this.openGroup=e||!1,this.alternatorGroup=!1,this.isGroup=e||!1,this.isOptional=t||!1,this.isQuantifier=a||!1,this.isAlternator=i||!1,this.quantifier={min:1,max:1}}},3194:function(){Array.prototype.includes||Object.defineProperty(Array.prototype,"includes",{value:function(e,t){if(null==this)throw new TypeError('"this" is null or not defined');var a=Object(this),i=a.length>>>0;if(0===i)return!1;for(var n=0|t,r=Math.max(n>=0?n:i-Math.abs(n),0);r<i;){if(a[r]===e)return!0;r++}return!1}})},7149:function(){function e(t){return(e="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(t)}"function"!=typeof Object.getPrototypeOf&&(Object.getPrototypeOf="object"===e("test".__proto__)?function(e){return e.__proto__}:function(e){return e.constructor.prototype})},8711:function(e,t,a){Object.defineProperty(t,"__esModule",{value:!0}),t.caret=function(e,t,a,i,n){var r,o=this,s=this.opts;if(void 0===t)return"selectionStart"in e&&"selectionEnd"in e?(t=e.selectionStart,a=e.selectionEnd):window.getSelection?(r=window.getSelection().getRangeAt(0)).commonAncestorContainer.parentNode!==e&&r.commonAncestorContainer!==e||(t=r.startOffset,a=r.endOffset):document.selection&&document.selection.createRange&&(r=document.selection.createRange(),t=0-r.duplicate().moveStart("character",-e.inputmask._valueGet().length),a=t+r.text.length),{begin:i?t:u.call(o,t),end:i?a:u.call(o,a)};if(Array.isArray(t)&&(a=o.isRTL?t[0]:t[1],t=o.isRTL?t[1]:t[0]),void 0!==t.begin&&(a=o.isRTL?t.begin:t.end,t=o.isRTL?t.end:t.begin),"number"==typeof t){t=i?t:u.call(o,t),a="number"==typeof(a=i?a:u.call(o,a))?a:t;var l=parseInt(((e.ownerDocument.defaultView||window).getComputedStyle?(e.ownerDocument.defaultView||window).getComputedStyle(e,null):e.currentStyle).fontSize)*a;if(e.scrollLeft=l>e.scrollWidth?l:0,e.inputmask.caretPos={begin:t,end:a},s.insertModeVisual&&!1===s.insertMode&&t===a&&(n||a++),e===(e.inputmask.shadowRoot||e.ownerDocument).activeElement)if("setSelectionRange"in e)e.setSelectionRange(t,a);else if(window.getSelection){if(r=document.createRange(),void 0===e.firstChild||null===e.firstChild){var c=document.createTextNode("");e.appendChild(c)}r.setStart(e.firstChild,t<e.inputmask._valueGet().length?t:e.inputmask._valueGet().length),r.setEnd(e.firstChild,a<e.inputmask._valueGet().length?a:e.inputmask._valueGet().length),r.collapse(!0);var f=window.getSelection();f.removeAllRanges(),f.addRange(r)}else e.createTextRange&&((r=e.createTextRange()).collapse(!0),r.moveEnd("character",a),r.moveStart("character",t),r.select())}},t.determineLastRequiredPosition=function(e){var t,a,r=this,s=this.maskset,l=this.dependencyLib,u=i.getMaskTemplate.call(r,!0,o.call(r),!0,!0),c=u.length,f=o.call(r),d={},p=s.validPositions[f],h=void 0!==p?p.locator.slice():void 0;for(t=f+1;t<u.length;t++)a=i.getTestTemplate.call(r,t,h,t-1),h=a.locator.slice(),d[t]=l.extend(!0,{},a);var m=p&&void 0!==p.alternation?p.locator[p.alternation]:void 0;for(t=c-1;t>f&&(((a=d[t]).match.optionality||a.match.optionalQuantifier&&a.match.newBlockMarker||m&&(m!==d[t].locator[p.alternation]&&1!=a.match.static||!0===a.match.static&&a.locator[p.alternation]&&n.checkAlternationMatch.call(r,a.locator[p.alternation].toString().split(","),m.toString().split(","))&&""!==i.getTests.call(r,t)[0].def))&&u[t]===i.getPlaceholder.call(r,t,a.match));t--)c--;return e?{l:c,def:d[c]?d[c].match:void 0}:c},t.determineNewCaretPosition=function(e,t,a){var n=this,u=this.maskset,c=this.opts;t&&(n.isRTL?e.end=e.begin:e.begin=e.end);if(e.begin===e.end){switch(a=a||c.positionCaretOnClick){case"none":break;case"select":e={begin:0,end:r.call(n).length};break;case"ignore":e.end=e.begin=l.call(n,o.call(n));break;case"radixFocus":if(function(e){if(""!==c.radixPoint&&0!==c.digits){var t=u.validPositions;if(void 0===t[e]||t[e].input===i.getPlaceholder.call(n,e)){if(e<l.call(n,-1))return!0;var a=r.call(n).indexOf(c.radixPoint);if(-1!==a){for(var o in t)if(t[o]&&a<o&&t[o].input!==i.getPlaceholder.call(n,o))return!1;return!0}}}return!1}(e.begin)){var f=r.call(n).join("").indexOf(c.radixPoint);e.end=e.begin=c.numericInput?l.call(n,f):f;break}default:var d=e.begin,p=o.call(n,d,!0),h=l.call(n,-1!==p||s.call(n,0)?p:-1);if(d<=h)e.end=e.begin=s.call(n,d,!1,!0)?d:l.call(n,d);else{var m=u.validPositions[p],v=i.getTestTemplate.call(n,h,m?m.match.locator:void 0,m),g=i.getPlaceholder.call(n,h,v.match);if(""!==g&&r.call(n)[h]!==g&&!0!==v.match.optionalQuantifier&&!0!==v.match.newBlockMarker||!s.call(n,h,c.keepStatic,!0)&&v.match.def===g){var k=l.call(n,h);(d>=k||d===h)&&(h=k)}e.end=e.begin=h}}return e}},t.getBuffer=r,t.getBufferTemplate=function(){var e=this.maskset;void 0===e._buffer&&(e._buffer=i.getMaskTemplate.call(this,!1,1),void 0===e.buffer&&(e.buffer=e._buffer.slice()));return e._buffer},t.getLastValidPosition=o,t.isMask=s,t.resetMaskSet=function(e){var t=this.maskset;t.buffer=void 0,!0!==e&&(t.validPositions={},t.p=0)},t.seekNext=l,t.seekPrevious=function(e,t){var a=this,n=e-1;if(e<=0)return 0;for(;n>0&&(!0===t&&(!0!==i.getTest.call(a,n).match.newBlockMarker||!s.call(a,n,void 0,!0))||!0!==t&&!s.call(a,n,void 0,!0));)n--;return n},t.translatePosition=u;var i=a(4713),n=a(7215);function r(e){var t=this.maskset;return void 0!==t.buffer&&!0!==e||(t.buffer=i.getMaskTemplate.call(this,!0,o.call(this),!0),void 0===t._buffer&&(t._buffer=t.buffer.slice())),t.buffer}function o(e,t,a){var i=this.maskset,n=-1,r=-1,o=a||i.validPositions;for(var s in void 0===e&&(e=-1),o){var l=parseInt(s);o[l]&&(t||!0!==o[l].generatedInput)&&(l<=e&&(n=l),l>=e&&(r=l))}return-1===n||n==e?r:-1==r||e-n<r-e?n:r}function s(e,t,a){var n=this,r=this.maskset,o=i.getTestTemplate.call(n,e).match;if(""===o.def&&(o=i.getTest.call(n,e).match),!0!==o.static)return o.fn;if(!0===a&&void 0!==r.validPositions[e]&&!0!==r.validPositions[e].generatedInput)return!0;if(!0!==t&&e>-1){if(a){var s=i.getTests.call(n,e);return s.length>1+(""===s[s.length-1].match.def?1:0)}var l=i.determineTestTemplate.call(n,e,i.getTests.call(n,e)),u=i.getPlaceholder.call(n,e,l.match);return l.match.def!==u}return!1}function l(e,t,a){var n=this;void 0===a&&(a=!0);for(var r=e+1;""!==i.getTest.call(n,r).match.def&&(!0===t&&(!0!==i.getTest.call(n,r).match.newBlockMarker||!s.call(n,r,void 0,!0))||!0!==t&&!s.call(n,r,void 0,a));)r++;return r}function u(e){var t=this.opts,a=this.el;return!this.isRTL||"number"!=typeof e||t.greedy&&""===t.placeholder||!a||(e=Math.abs(this._valueGet().length-e)),e}},4713:function(e,t){function a(e,t){var a=(null!=e.alternation?e.mloc[i(e)]:e.locator).join("");if(""!==a)for(;a.length<t;)a+="0";return a}function i(e){var t=e.locator[e.alternation];return"string"==typeof t&&t.length>0&&(t=t.split(",")[0]),void 0!==t?t.toString():""}function n(e,t,a){var i=this.opts,n=this.maskset;if(void 0!==(t=t||s.call(this,e).match).placeholder||!0===a)return"function"==typeof t.placeholder?t.placeholder(i):t.placeholder;if(!0===t.static){if(e>-1&&void 0===n.validPositions[e]){var r,o=u.call(this,e),l=[];if(o.length>1+(""===o[o.length-1].match.def?1:0))for(var c=0;c<o.length;c++)if(""!==o[c].match.def&&!0!==o[c].match.optionality&&!0!==o[c].match.optionalQuantifier&&(!0===o[c].match.static||void 0===r||!1!==o[c].match.fn.test(r.match.def,n,e,!0,i))&&(l.push(o[c]),!0===o[c].match.static&&(r=o[c]),l.length>1&&/[0-9a-bA-Z]/.test(l[0].match.def)))return i.placeholder.charAt(e%i.placeholder.length)}return t.def}return i.placeholder.charAt(e%i.placeholder.length)}function r(e,t,a){return this.maskset.validPositions[e]||o.call(this,e,u.call(this,e,t?t.slice():t,a))}function o(e,t){var i=this.opts;e=e>0?e-1:0;for(var n,r,o,l=a(s.call(this,e)),u=0;u<t.length;u++){var c=t[u];n=a(c,l.length);var f=Math.abs(n-l);(void 0===r||""!==n&&f<r||o&&!i.greedy&&o.match.optionality&&"master"===o.match.newBlockMarker&&(!c.match.optionality||!c.match.newBlockMarker)||o&&o.match.optionalQuantifier&&!c.match.optionalQuantifier)&&(r=f,o=c)}return o}function s(e,t){var a=this.maskset;return a.validPositions[e]?a.validPositions[e]:(t||u.call(this,e))[0]}function l(e,t,a){function i(e){for(var t,a=[],i=-1,n=0,r=e.length;n<r;n++)if("-"===e.charAt(n))for(t=e.charCodeAt(n+1);++i<t;)a.push(String.fromCharCode(i));else i=e.charCodeAt(n),a.push(e.charAt(n));return a.join("")}return e.match.def===t.match.nativeDef||!(!(a.regex||e.match.fn instanceof RegExp&&t.match.fn instanceof RegExp)||!0===e.match.static||!0===t.match.static)&&-1!==i(t.match.fn.toString().replace(/[[\]/]/g,"")).indexOf(i(e.match.fn.toString().replace(/[[\]/]/g,"")))}function u(e,t,a){var i,n=this,r=this.dependencyLib,s=this.maskset,u=this.opts,c=this.el,f=s.maskToken,d=t?a:0,p=t?t.slice():[0],h=[],m=!1,v=t?t.join(""):"";function g(t,a,n,r){function o(n,r,f){function p(e,t){var a=0===t.matches.indexOf(e);return a||t.matches.every((function(i,n){return!0===i.isQuantifier?a=p(e,t.matches[n-1]):Object.prototype.hasOwnProperty.call(i,"matches")&&(a=p(e,i)),!a})),a}function k(e,t,a){var i,n;if((s.tests[e]||s.validPositions[e])&&(s.tests[e]||[s.validPositions[e]]).every((function(e,r){if(e.mloc[t])return i=e,!1;var o=void 0!==a?a:e.alternation,s=void 0!==e.locator[o]?e.locator[o].toString().indexOf(t):-1;return(void 0===n||s<n)&&-1!==s&&(i=e,n=s),!0})),i){var r=i.locator[i.alternation];return(i.mloc[t]||i.mloc[r]||i.locator).slice((void 0!==a?a:i.alternation)+1)}return void 0!==a?k(e,t):void 0}function y(e,t){var a=e.alternation,i=void 0===t||a===t.alternation&&-1===e.locator[a].toString().indexOf(t.locator[a]);if(!i&&a>t.alternation)for(var n=t.alternation;n<a;n++)if(e.locator[n]!==t.locator[n]){a=n,i=!0;break}if(i){e.mloc=e.mloc||{};var r=e.locator[a];if(void 0!==r){if("string"==typeof r&&(r=r.split(",")[0]),void 0===e.mloc[r]&&(e.mloc[r]=e.locator.slice()),void 0!==t){for(var o in t.mloc)"string"==typeof o&&(o=o.split(",")[0]),void 0===e.mloc[o]&&(e.mloc[o]=t.mloc[o]);e.locator[a]=Object.keys(e.mloc).join(",")}return!0}e.alternation=void 0}return!1}function b(e,t){if(e.locator.length!==t.locator.length)return!1;for(var a=e.alternation+1;a<e.locator.length;a++)if(e.locator[a]!==t.locator[a])return!1;return!0}if(d>e+u._maxTestPos)throw"Inputmask: There is probably an error in your mask definition or in the code. Create an issue on github with an example of the mask you are using. "+s.mask;if(d===e&&void 0===n.matches)return h.push({match:n,locator:r.reverse(),cd:v,mloc:{}}),!0;if(void 0!==n.matches){if(n.isGroup&&f!==n){if(n=o(t.matches[t.matches.indexOf(n)+1],r,f))return!0}else if(n.isOptional){var x=n,P=h.length;if(n=g(n,a,r,f)){if(h.forEach((function(e,t){t>=P&&(e.match.optionality=!0)})),i=h[h.length-1].match,void 0!==f||!p(i,x))return!0;m=!0,d=e}}else if(n.isAlternator){var E,S=n,_=[],M=h.slice(),w=r.length,O=!1,T=a.length>0?a.shift():-1;if(-1===T||"string"==typeof T){var A,C=d,D=a.slice(),B=[];if("string"==typeof T)B=T.split(",");else for(A=0;A<S.matches.length;A++)B.push(A.toString());if(void 0!==s.excludes[e]){for(var j=B.slice(),R=0,L=s.excludes[e].length;R<L;R++){var I=s.excludes[e][R].toString().split(":");r.length==I[1]&&B.splice(B.indexOf(I[0]),1)}0===B.length&&(delete s.excludes[e],B=j)}(!0===u.keepStatic||isFinite(parseInt(u.keepStatic))&&C>=u.keepStatic)&&(B=B.slice(0,1));for(var F=0;F<B.length;F++){A=parseInt(B[F]),h=[],a="string"==typeof T&&k(d,A,w)||D.slice();var N=S.matches[A];if(N&&o(N,[A].concat(r),f))n=!0;else if(0===F&&(O=!0),N&&N.matches&&N.matches.length>S.matches[0].matches.length)break;E=h.slice(),d=C,h=[];for(var V=0;V<E.length;V++){var G=E[V],H=!1;G.match.jit=G.match.jit||O,G.alternation=G.alternation||w,y(G);for(var K=0;K<_.length;K++){var U=_[K];if("string"!=typeof T||void 0!==G.alternation&&B.includes(G.locator[G.alternation].toString())){if(G.match.nativeDef===U.match.nativeDef){H=!0,y(U,G);break}if(l(G,U,u)){y(G,U)&&(H=!0,_.splice(_.indexOf(U),0,G));break}if(l(U,G,u)){y(U,G);break}if(W=U,!0===(Q=G).match.static&&!0!==W.match.static&&W.match.fn.test(Q.match.def,s,e,!1,u,!1)){b(G,U)||void 0!==c.inputmask.userOptions.keepStatic?y(G,U)&&(H=!0,_.splice(_.indexOf(U),0,G)):u.keepStatic=!0;break}}}H||_.push(G)}}h=M.concat(_),d=e,m=h.length>0,n=_.length>0,a=D.slice()}else n=o(S.matches[T]||t.matches[T],[T].concat(r),f);if(n)return!0}else if(n.isQuantifier&&f!==t.matches[t.matches.indexOf(n)-1])for(var $=n,q=a.length>0?a.shift():0;q<(isNaN($.quantifier.max)?q+1:$.quantifier.max)&&d<=e;q++){var z=t.matches[t.matches.indexOf($)-1];if(n=o(z,[q].concat(r),z)){if((i=h[h.length-1].match).optionalQuantifier=q>=$.quantifier.min,i.jit=(q||1)*z.matches.indexOf(i)>=$.quantifier.jit,i.optionalQuantifier&&p(i,z)){m=!0,d=e;break}return i.jit&&(s.jitOffset[e]=z.matches.length-z.matches.indexOf(i)),!0}}else if(n=g(n,a,r,f))return!0}else d++;var Q,W}for(var f=a.length>0?a.shift():0;f<t.matches.length;f++)if(!0!==t.matches[f].isQuantifier){var p=o(t.matches[f],[f].concat(n),r);if(p&&d===e)return p;if(d>e)break}}if(e>-1&&(void 0===n.maxLength||e<n.maxLength)){if(void 0===t){for(var k,y=e-1;void 0===(k=s.validPositions[y]||s.tests[y])&&y>-1;)y--;void 0!==k&&y>-1&&(p=function(e,t){var a,i=[];return Array.isArray(t)||(t=[t]),t.length>0&&(void 0===t[0].alternation||!0===u.keepStatic?0===(i=o.call(n,e,t.slice()).locator.slice()).length&&(i=t[0].locator.slice()):t.forEach((function(e){""!==e.def&&(0===i.length?(a=e.alternation,i=e.locator.slice()):e.locator[a]&&-1===i[a].toString().indexOf(e.locator[a])&&(i[a]+=","+e.locator[a]))}))),i}(y,k),v=p.join(""),d=y)}if(s.tests[e]&&s.tests[e][0].cd===v)return s.tests[e];for(var b=p.shift();b<f.length;b++){if(g(f[b],p,[b])&&d===e||d>e)break}}return(0===h.length||m)&&h.push({match:{fn:null,static:!0,optionality:!1,casing:null,def:"",placeholder:""},locator:[],mloc:{},cd:v}),void 0!==t&&s.tests[e]?r.extend(!0,[],h):(s.tests[e]=r.extend(!0,[],h),s.tests[e])}Object.defineProperty(t,"__esModule",{value:!0}),t.determineTestTemplate=o,t.getDecisionTaker=i,t.getMaskTemplate=function(e,t,a,i,s){var l=this,c=this.opts,f=this.maskset,d=c.greedy;s&&(c.greedy=!1);t=t||0;var p,h,m,v,g=[],k=0;do{if(!0===e&&f.validPositions[k])m=s&&!0===f.validPositions[k].match.optionality&&void 0===f.validPositions[k+1]&&(!0===f.validPositions[k].generatedInput||f.validPositions[k].input==c.skipOptionalPartCharacter&&k>0)?o.call(l,k,u.call(l,k,p,k-1)):f.validPositions[k],h=m.match,p=m.locator.slice(),g.push(!0===a?m.input:!1===a?h.nativeDef:n.call(l,k,h));else{m=r.call(l,k,p,k-1),h=m.match,p=m.locator.slice();var y=!0!==i&&(!1!==c.jitMasking?c.jitMasking:h.jit);(v=(v&&h.static&&h.def!==c.groupSeparator&&null===h.fn||f.validPositions[k-1]&&h.static&&h.def!==c.groupSeparator&&null===h.fn)&&f.tests[k]&&1===f.tests[k].length)||!1===y||void 0===y||"number"==typeof y&&isFinite(y)&&y>k?g.push(!1===a?h.nativeDef:n.call(l,k,h)):v=!1}k++}while((void 0===l.maxLength||k<l.maxLength)&&(!0!==h.static||""!==h.def)||t>k);""===g[g.length-1]&&g.pop();!1===a&&void 0!==f.maskLength||(f.maskLength=k-1);return c.greedy=d,g},t.getPlaceholder=n,t.getTest=s,t.getTests=u,t.getTestTemplate=r,t.isSubsetOf=l},7215:function(e,t,a){Object.defineProperty(t,"__esModule",{value:!0}),t.alternate=l,t.checkAlternationMatch=function(e,t,a){for(var i,n=this.opts.greedy?t:t.slice(0,1),r=!1,o=void 0!==a?a.split(","):[],s=0;s<o.length;s++)-1!==(i=e.indexOf(o[s]))&&e.splice(i,1);for(var l=0;l<e.length;l++)if(n.includes(e[l])){r=!0;break}return r},t.isComplete=c,t.isValid=f,t.refreshFromBuffer=p,t.revalidateMask=m,t.handleRemove=function(e,t,a,i,s){var u=this,c=this.maskset,f=this.opts;if((f.numericInput||u.isRTL)&&(t===r.default.BACKSPACE?t=r.default.DELETE:t===r.default.DELETE&&(t=r.default.BACKSPACE),u.isRTL)){var d=a.end;a.end=a.begin,a.begin=d}var p,h=o.getLastValidPosition.call(u,void 0,!0);a.end>=o.getBuffer.call(u).length&&h>=a.end&&(a.end=h+1);t===r.default.BACKSPACE?a.end-a.begin<1&&(a.begin=o.seekPrevious.call(u,a.begin)):t===r.default.DELETE&&a.begin===a.end&&(a.end=o.isMask.call(u,a.end,!0,!0)?a.end+1:o.seekNext.call(u,a.end)+1);if(!1!==(p=m.call(u,a))){if(!0!==i&&!1!==f.keepStatic||null!==f.regex&&-1!==n.getTest.call(u,a.begin).match.def.indexOf("|")){var v=l.call(u,!0);if(v){var g=void 0!==v.caret?v.caret:v.pos?o.seekNext.call(u,v.pos.begin?v.pos.begin:v.pos):o.getLastValidPosition.call(u,-1,!0);(t!==r.default.DELETE||a.begin>g)&&a.begin}}!0!==i&&(c.p=t===r.default.DELETE?a.begin+p:a.begin,c.p=o.determineNewCaretPosition.call(u,{begin:c.p,end:c.p},!1).begin)}};var i,n=a(4713),r=(i=a(4528))&&i.__esModule?i:{default:i},o=a(8711),s=a(6030);function l(e,t,a,i,r,s){var u,c,d,p,h,m,v,g,k,y,b,x=this,P=this.dependencyLib,E=this.opts,S=x.maskset,_=P.extend(!0,{},S.validPositions),M=P.extend(!0,{},S.tests),w=!1,O=!1,T=void 0!==r?r:o.getLastValidPosition.call(x);if(s&&(y=s.begin,b=s.end,s.begin>s.end&&(y=s.end,b=s.begin)),-1===T&&void 0===r)u=0,c=(p=n.getTest.call(x,u)).alternation;else for(;T>=0;T--)if((d=S.validPositions[T])&&void 0!==d.alternation){if(p&&p.locator[d.alternation]!==d.locator[d.alternation])break;u=T,c=S.validPositions[u].alternation,p=d}if(void 0!==c){v=parseInt(u),S.excludes[v]=S.excludes[v]||[],!0!==e&&S.excludes[v].push((0,n.getDecisionTaker)(p)+":"+p.alternation);var A=[],C=-1;for(h=v;h<o.getLastValidPosition.call(x,void 0,!0)+1;h++)-1===C&&e<=h&&void 0!==t&&(A.push(t),C=A.length-1),(m=S.validPositions[h])&&!0!==m.generatedInput&&(void 0===s||h<y||h>=b)&&A.push(m.input),delete S.validPositions[h];for(-1===C&&void 0!==t&&(A.push(t),C=A.length-1);void 0!==S.excludes[v]&&S.excludes[v].length<10;){for(S.tests={},o.resetMaskSet.call(x,!0),w=!0,h=0;h<A.length&&(g=w.caret||o.getLastValidPosition.call(x,void 0,!0)+1,k=A[h],w=f.call(x,g,k,!1,i,!0));h++)h===C&&(O=w),1==e&&w&&(O={caretPos:h});if(w)break;if(o.resetMaskSet.call(x),p=n.getTest.call(x,v),S.validPositions=P.extend(!0,{},_),S.tests=P.extend(!0,{},M),!S.excludes[v]){O=l.call(x,e,t,a,i,v-1,s);break}var D=(0,n.getDecisionTaker)(p);if(-1!==S.excludes[v].indexOf(D+":"+p.alternation)){O=l.call(x,e,t,a,i,v-1,s);break}for(S.excludes[v].push(D+":"+p.alternation),h=v;h<o.getLastValidPosition.call(x,void 0,!0)+1;h++)delete S.validPositions[h]}}return O&&!1===E.keepStatic||delete S.excludes[v],O}function u(e,t,a){var i=this.opts,n=this.maskset;switch(i.casing||t.casing){case"upper":e=e.toUpperCase();break;case"lower":e=e.toLowerCase();break;case"title":var o=n.validPositions[a-1];e=0===a||o&&o.input===String.fromCharCode(r.default.SPACE)?e.toUpperCase():e.toLowerCase();break;default:if("function"==typeof i.casing){var s=Array.prototype.slice.call(arguments);s.push(n.validPositions),e=i.casing.apply(this,s)}}return e}function c(e){var t=this,a=this.opts,i=this.maskset;if("function"==typeof a.isComplete)return a.isComplete(e,a);if("*"!==a.repeat){var r=!1,s=o.determineLastRequiredPosition.call(t,!0),l=o.seekPrevious.call(t,s.l);if(void 0===s.def||s.def.newBlockMarker||s.def.optionality||s.def.optionalQuantifier){r=!0;for(var u=0;u<=l;u++){var c=n.getTestTemplate.call(t,u).match;if(!0!==c.static&&void 0===i.validPositions[u]&&!0!==c.optionality&&!0!==c.optionalQuantifier||!0===c.static&&e[u]!==n.getPlaceholder.call(t,u,c)){r=!1;break}}}return r}}function f(e,t,a,i,r,s,d){var v=this,g=this.dependencyLib,k=this.opts,y=v.maskset;function b(e){return v.isRTL?e.begin-e.end>1||e.begin-e.end==1:e.end-e.begin>1||e.end-e.begin==1}a=!0===a;var x=e;function P(e){if(void 0!==e){if(void 0!==e.remove&&(Array.isArray(e.remove)||(e.remove=[e.remove]),e.remove.sort((function(e,t){return t.pos-e.pos})).forEach((function(e){m.call(v,{begin:e,end:e+1})})),e.remove=void 0),void 0!==e.insert&&(Array.isArray(e.insert)||(e.insert=[e.insert]),e.insert.sort((function(e,t){return e.pos-t.pos})).forEach((function(e){""!==e.c&&f.call(v,e.pos,e.c,void 0===e.strict||e.strict,void 0!==e.fromIsValid?e.fromIsValid:i)})),e.insert=void 0),e.refreshFromBuffer&&e.buffer){var t=e.refreshFromBuffer;p.call(v,!0===t?t:t.start,t.end,e.buffer),e.refreshFromBuffer=void 0}void 0!==e.rewritePosition&&(x=e.rewritePosition,e=!0)}return e}function E(t,a,r){var s=!1;return n.getTests.call(v,t).every((function(l,c){var f=l.match;if(o.getBuffer.call(v,!0),!1!==(s=null!=f.fn?f.fn.test(a,y,t,r,k,b(e)):(a===f.def||a===k.skipOptionalPartCharacter)&&""!==f.def&&{c:n.getPlaceholder.call(v,t,f,!0)||f.def,pos:t})){var d=void 0!==s.c?s.c:a,p=t;return d=d===k.skipOptionalPartCharacter&&!0===f.static?n.getPlaceholder.call(v,t,f,!0)||f.def:d,!0!==(s=P(s))&&void 0!==s.pos&&s.pos!==t&&(p=s.pos),!0!==s&&void 0===s.pos&&void 0===s.c?!1:(!1===m.call(v,e,g.extend({},l,{input:u.call(v,d,f,p)}),i,p)&&(s=!1),!1)}return!0})),s}void 0!==e.begin&&(x=v.isRTL?e.end:e.begin);var S=!0,_=g.extend(!0,{},y.validPositions);if(!1===k.keepStatic&&void 0!==y.excludes[x]&&!0!==r&&!0!==i)for(var M=x;M<(v.isRTL?e.begin:e.end);M++)void 0!==y.excludes[M]&&(y.excludes[M]=void 0,delete y.tests[M]);if("function"==typeof k.preValidation&&!0!==i&&!0!==s&&(S=P(S=k.preValidation.call(v,o.getBuffer.call(v),x,t,b(e),k,y,e,a||r))),!0===S){if(void 0===v.maxLength||x<o.translatePosition.call(v,v.maxLength)){if(S=E(x,t,a),(!a||!0===i)&&!1===S&&!0!==s){var w=y.validPositions[x];if(!w||!0!==w.match.static||w.match.def!==t&&t!==k.skipOptionalPartCharacter){if(k.insertMode||void 0===y.validPositions[o.seekNext.call(v,x)]||e.end>x){var O=!1;if(y.jitOffset[x]&&void 0===y.validPositions[o.seekNext.call(v,x)]&&!1!==(S=f.call(v,x+y.jitOffset[x],t,!0))&&(!0!==r&&(S.caret=x),O=!0),e.end>x&&(y.validPositions[x]=void 0),!O&&!o.isMask.call(v,x,k.keepStatic&&0===x))for(var T=x+1,A=o.seekNext.call(v,x,!1,0!==x);T<=A;T++)if(!1!==(S=E(T,t,a))){S=h.call(v,x,void 0!==S.pos?S.pos:T)||S,x=T;break}}}else S={caret:o.seekNext.call(v,x)}}}else S=!1;!1!==S||!k.keepStatic||!c.call(v,o.getBuffer.call(v))&&0!==x||a||!0===r?b(e)&&y.tests[x]&&y.tests[x].length>1&&k.keepStatic&&!a&&!0!==r&&(S=l.call(v,!0)):S=l.call(v,x,t,a,i,void 0,e),!0===S&&(S={pos:x})}if("function"==typeof k.postValidation&&!0!==i&&!0!==s){var C=k.postValidation.call(v,o.getBuffer.call(v,!0),void 0!==e.begin?v.isRTL?e.end:e.begin:e,t,S,k,y,a,d);void 0!==C&&(S=!0===C?S:C)}return S&&void 0===S.pos&&(S.pos=x),!1===S||!0===s?(o.resetMaskSet.call(v,!0),y.validPositions=g.extend(!0,{},_)):h.call(v,void 0,x,!0),P(S)}function d(e,t,a){for(var i=this.maskset,r=!1,o=n.getTests.call(this,e),s=0;s<o.length;s++){if(o[s].match&&(o[s].match.nativeDef===t.match[a.shiftPositions?"def":"nativeDef"]&&(!a.shiftPositions||!t.match.static)||o[s].match.nativeDef===t.match.nativeDef||a.regex&&!o[s].match.static&&o[s].match.fn.test(t.input))){r=!0;break}if(o[s].match&&o[s].match.def===t.match.nativeDef){r=void 0;break}}return!1===r&&void 0!==i.jitOffset[e]&&(r=d.call(this,e+i.jitOffset[e],t,a)),r}function p(e,t,a){var i,n,r=this,l=this.maskset,u=this.opts,c=this.dependencyLib,f=u.skipOptionalPartCharacter,d=r.isRTL?a.slice().reverse():a;if(u.skipOptionalPartCharacter="",!0===e)o.resetMaskSet.call(r),l.tests={},e=0,t=a.length,n=o.determineNewCaretPosition.call(r,{begin:0,end:0},!1).begin;else{for(i=e;i<t;i++)delete l.validPositions[i];n=e}var p=new c.Event("keypress");for(i=e;i<t;i++){p.which=d[i].toString().charCodeAt(0),r.ignorable=!1;var h=s.EventHandlers.keypressEvent.call(r,p,!0,!1,!1,n);!1!==h&&void 0!==h&&(n=h.forwardPosition)}u.skipOptionalPartCharacter=f}function h(e,t,a){var i=this,r=this.maskset,s=this.dependencyLib;if(void 0===e)for(e=t-1;e>0&&!r.validPositions[e];e--);for(var l=e;l<t;l++){if(void 0===r.validPositions[l]&&!o.isMask.call(i,l,!1))if(0==l?n.getTest.call(i,l):r.validPositions[l-1]){var u=n.getTests.call(i,l).slice();""===u[u.length-1].match.def&&u.pop();var c,d=n.determineTestTemplate.call(i,l,u);if(d&&(!0!==d.match.jit||"master"===d.match.newBlockMarker&&(c=r.validPositions[l+1])&&!0===c.match.optionalQuantifier)&&((d=s.extend({},d,{input:n.getPlaceholder.call(i,l,d.match,!0)||d.match.def})).generatedInput=!0,m.call(i,l,d,!0),!0!==a)){var p=r.validPositions[t].input;return r.validPositions[t]=void 0,f.call(i,t,p,!0,!0)}}}}function m(e,t,a,i){var r=this,s=this.maskset,l=this.opts,u=this.dependencyLib;function c(e,t,a){var i=t[e];if(void 0!==i&&!0===i.match.static&&!0!==i.match.optionality&&(void 0===t[0]||void 0===t[0].alternation)){var n=a.begin<=e-1?t[e-1]&&!0===t[e-1].match.static&&t[e-1]:t[e-1],r=a.end>e+1?t[e+1]&&!0===t[e+1].match.static&&t[e+1]:t[e+1];return n&&r}return!1}var p=0,h=void 0!==e.begin?e.begin:e,m=void 0!==e.end?e.end:e;if(e.begin>e.end&&(h=e.end,m=e.begin),i=void 0!==i?i:h,h!==m||l.insertMode&&void 0!==s.validPositions[i]&&void 0===a||void 0===t){var v,g=u.extend(!0,{},s.validPositions),k=o.getLastValidPosition.call(r,void 0,!0);for(s.p=h,v=k;v>=h;v--)delete s.validPositions[v],void 0===t&&delete s.tests[v+1];var y,b,x=!0,P=i,E=P;for(t&&(s.validPositions[i]=u.extend(!0,{},t),E++,P++),v=t?m:m-1;v<=k;v++){if(void 0!==(y=g[v])&&!0!==y.generatedInput&&(v>=m||v>=h&&c(v,g,{begin:h,end:m}))){for(;""!==n.getTest.call(r,E).match.def;){if(!1!==(b=d.call(r,E,y,l))||"+"===y.match.def){"+"===y.match.def&&o.getBuffer.call(r,!0);var S=f.call(r,E,y.input,"+"!==y.match.def,"+"!==y.match.def);if(x=!1!==S,P=(S.pos||E)+1,!x&&b)break}else x=!1;if(x){void 0===t&&y.match.static&&v===e.begin&&p++;break}if(!x&&E>s.maskLength)break;E++}""==n.getTest.call(r,E).match.def&&(x=!1),E=P}if(!x)break}if(!x)return s.validPositions=u.extend(!0,{},g),o.resetMaskSet.call(r,!0),!1}else t&&n.getTest.call(r,i).match.cd===t.match.cd&&(s.validPositions[i]=u.extend(!0,{},t));return o.resetMaskSet.call(r,!0),p}},8254:function(t){t.exports=e}},a={};function i(e){var n=a[e];if(void 0!==n)return n.exports;var r=a[e]={exports:{}};return t[e](r,r.exports,i),r.exports}var n={};return function(){var e=n;Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var t,a=(t=i(3046))&&t.__esModule?t:{default:t};i(443);var r=a.default;e.default=r}(),n}()}));
//# sourceMappingURL=jquery.inputmask.min.js.map
|
* dist/jquery.inputmask.min
* https://github.com/RobinHerbots/Inputmask
* Copyright (c) 2010 - 2021 Robin Herbots
* Licensed under the MIT license
|
BorrowRequest.ts
|
import { BigNumber } from "@0x/utils";
import { Asset } from "./Asset";
import { WalletType } from "./WalletType";
export class
|
{
public walletType: WalletType;
public borrowAsset: Asset;
public borrowAmount: BigNumber;
public collateralAsset: Asset;
public depositAmount: BigNumber;
constructor(walletType: WalletType, borrowAsset: Asset, borrowAmount: BigNumber, collateralAsset: Asset, depositAmount: BigNumber) {
this.walletType = walletType;
this.borrowAsset = borrowAsset;
this.borrowAmount = borrowAmount;
this.collateralAsset = collateralAsset;
this.depositAmount = depositAmount;
}
}
|
BorrowRequest
|
pocketItemSanitizer.ts
|
import { RawPocketItem } from "./rawPocketItem";
import { PocketItem } from "./pocketItem";
function parseStatus(status: string): "normal" | "archived" | "deleted" {
switch (status) {
case "0":
return "normal";
case "1":
return "archived";
case "2":
return "deleted";
default:
throw new Error(`invalid status: ${status}`);
}
}
function parseDate(unixTimeStampString: string): Date {
return new Date(parseInt(unixTimeStampString, 10) * 1000);
}
function
|
(unixTimeStampString: string): Date | null {
return (unixTimeStampString === "0") ? null : parseDate(unixTimeStampString as string);
}
function parseHasVideo(hasVideo: string): "none" | "has in" | "is" {
if (hasVideo === undefined) {
return "none";
}
switch (hasVideo) {
case "0":
return "none";
case "1":
return "has in";
case "2":
return "is";
default:
throw new Error(`invalid has_video: ${hasVideo}`);
}
}
function parseHasImage(hasImage: string): "none" | "has in" | "is" {
if (hasImage === undefined) {
return "none";
}
switch (hasImage) {
case "0":
return "none";
case "1":
return "has in";
case "2":
return "is";
default:
throw new Error(`invalid has_image: ${hasImage}`);
}
}
export function sanitize(raw: RawPocketItem): PocketItem {
if (raw.item_id === undefined) {
throw new Error("item_id is undefined");
}
if (raw.resolved_id === undefined) {
throw new Error("resolved_id is undefined");
}
if (raw.given_url === undefined) {
throw new Error("given_url is undefined");
}
if (raw.given_title === undefined) {
throw new Error("given_title is undefined");
}
if (raw.favorite === undefined) {
throw new Error("favorite is undefined");
}
if (raw.status === undefined) {
throw new Error("status is undefined");
}
if (raw.time_added === undefined) {
throw new Error("time_added is undefined");
}
if (raw.time_updated === undefined) {
throw new Error("time_updated is undefined");
}
if (raw.time_read === undefined) {
throw new Error("time_read is undefined");
}
if (raw.time_favorited === undefined) {
throw new Error("time_favorited is undefined");
}
if (raw.sort_id === undefined) {
throw new Error("sort_id is undefined");
}
if (raw.resolved_title === undefined) {
throw new Error("resolved_title is undefined");
}
if (raw.resolved_url === undefined) {
throw new Error("resolved_url is undefined");
}
if (raw.excerpt === undefined) {
throw new Error("excerpt is undefined");
}
if (raw.is_article === undefined) {
throw new Error("is_article is undefined");
}
if (raw.is_index === undefined) {
throw new Error("is_index is undefined");
}
if (raw.has_video === undefined) {
throw new Error("has_video is undefined");
}
if (raw.has_image === undefined) {
throw new Error("has_image is undefined");
}
if (raw.word_count === undefined) {
throw new Error("word_count is undefined");
}
const itemId = parseInt(raw.item_id, 10);
const resolvedId = parseInt(raw.resolved_id, 10);
const givenUrl = raw.given_url;
const givenTitle = raw.given_title;
const favorite = (raw.favorite === "1");
const status = parseStatus(raw.status);
const timeAdded = parseDate(raw.time_added);
const timeUpdated = parseDate(raw.time_updated);
const timeRead = parseNullableDate(raw.time_read);
const timeFavorited = parseNullableDate(raw.time_favorited);
const sortId = raw.sort_id;
const resolvedTitle = raw.resolved_title;
const resolvedUrl = raw.resolved_url;
const excerpt = raw.excerpt;
const isArticle = (raw.is_article === "1");
const isIndex = raw.is_index;
const hasVideo = parseHasVideo(raw.has_video);
const hasImage = parseHasImage(raw.has_image);
const wordCount = parseInt(raw.word_count, 10);
const rawData = raw;
return new PocketItem({
itemId,
resolvedId,
givenUrl,
givenTitle,
favorite,
status,
timeAdded,
timeUpdated,
timeRead,
timeFavorited,
sortId,
resolvedTitle,
resolvedUrl,
excerpt,
isArticle,
isIndex,
hasVideo,
hasImage,
wordCount,
rawData,
});
}
|
parseNullableDate
|
Checkout.js
|
'use strict'
class Checkout {
constructor () {
const $ = document.querySelector.bind(document)
// props
this.cardNumber = $('#numero-do-cartao-front')
this.cardNumberBack = $('#numero-do-cartao-back')
this.cardName = $('#nome-front')
this.cardNameBack = $('#nome-back')
this.cardDate = $('#data-front')
this.cardDateBack = $('#data-back')
this.cardBack = $('#cartao-back')
this.areaTitle = $('#titulo-area')
this.cardAreaTitle = $('#card-area-titulo')
this.areaButton = $('#button-area')
this.flipper = $('#flipper')
this.littleMachine = $('#maquininha')
this.niceEdge = $('#bordinha-nice')
this.successFeedback = $('#feedback-sucesso')
this.total = $('#total')
this.willDisappearAll = document.querySelectorAll('.vai-sumir')
this.willDisappear = $('.vai-sumir')
// methods
this.scrolTop = this.scrolTop.bind(this)
this.secondStep = this.secondStep.bind(this)
this.submit = this.submit.bind(this)
}
scrolTop () {
window.scroll({
top: 0,
left: 0,
behavior: 'smooth'
})
}
secondStep () {
this.littleMachine.addEventListener('transitionend', () => {
this.littleMachine.style.cssText = `
height:auto;
z-index: 999;
padding: 50px 35px 90px 35px;
width: 80%;
margin:0px auto;
background: #362563;
background: -moz-linear-gradient(360deg, #362563 0%, #4b2563 100%);
background: -webkit-linear-gradient(360deg, #362563 0%,#4b2563 100%);
background: linear-gradient(360deg, #362563 0%,#4b2563 100%);
filter: progid:DXImageTransform.Microsoft.gradient( startColorstr='#362563', endColorstr='#4b2563',GradientType=1 );
border-bottom: 6px solid #20163d;
box-shadow: 0px 4px 8px rgba(0, 0, 0, 0.45);
`
this.niceEdge.style.opacity = '1'
})
this.flipper.addEventListener('animationend', () => {
this.successFeedback.classList.add('confirmado')
})
}
submit () {
this.littleMachine.style.height = `${this.littleMachine.offsetHeight}px`
this.scrolTop()
|
this.cardDateBack.insertAdjacentHTML('afterbegin', this.cardDate.value)
this.cardAreaTitle.classList.add('fadeOut')
this.areaTitle.classList.add('fadeOut')
this.areaButton.classList.add('fadeOut')
this.flipper.classList.add('anima-flipper')
this.cardBack.classList.add('anima-cartao-back')
this.littleMachine.classList.add('animacao-maquininha')
this.cardBack.addEventListener('animationend', () => {
this.willDisappearAll.forEach(item => {
item.style.animation = 'vaiSumir 300ms both'
})
this.willDisappear.addEventListener('animationend', () => {
this.willDisappearAll.forEach(item => item.remove())
this.total.style.cssText = 'padding:15px; border:2px solid rgba(255,255,255,0.7); border-radius:4px;'
this.littleMachine.style.height = '100px'
this.secondStep()
}, false)
}, false)
}
}
export default Checkout
|
this.cardNumberBack.insertAdjacentHTML('afterbegin', this.cardNumber.value)
this.cardNameBack.insertAdjacentHTML('afterbegin', this.cardName.value.toUpperCase())
|
bfast_wrapper.py
|
#!/usr/bin/env python
"""
Runs BFAST on single-end or paired-end data.
TODO: more documentation
TODO:
- auto-detect gzip or bz2
- split options (?)
- queue lengths (?)
- assumes reference always has been indexed
- main and secondary indexes
- scoring matrix file ?
- read group file ?
usage: bfast_wrapper.py [options]
-r, --ref=r: The reference genome to use or index
-f, --fastq=f: The fastq file to use for the mapping
-F, --output=u: The file to save the output (SAM format)
-s, --fileSource=s: Whether to use a previously indexed reference sequence or one from history (indexed or history)
-p, --params=p: Parameter setting to use (pre_set or full)
-n, --numThreads=n: The number of threads to use
-A, --space=A: The encoding space (0: base 1: color)
-o, --offsets=o: The offsets for 'match'
-l, --loadAllIndexes=l: Load all indexes into memory
-k, --keySize=k: truncate key size in 'match'
-K, --maxKeyMatches=K: the maximum number of matches to allow before a key is ignored
-M, --maxNumMatches=M: the maximum number of matches to allow before the read is discarded
-w, --whichStrand=w: the strands to consider (0: both 1: forward 2: reverse)
-t, --timing=t: output timing information to stderr
-u, --ungapped=u: performed ungapped local alignment
-U, --unconstrained=U: performed local alignment without mask constraints
-O, --offset=O: the number of bases before and after each hit to consider in local alignment
-q, --avgMismatchQuality=q: average mismatch quality
-a, --algorithm=a: post processing algorithm (0: no filtering, 1: all passing filters, 2: unique, 3: best scoring unique, 4: best score all)
-P, --disallowPairing=P: do not choose alignments based on pairing
-R, --reverse=R: paired end reads are given on reverse strands
-z, --random=z: output a random best scoring alignment
-D, --dbkey=D: Dbkey for reference genome
-H, --suppressHeader=H: Suppress the sam header
"""
import optparse
import os
import shutil
import subprocess
import sys
import tempfile
def stop_err( msg ):
|
def __main__():
parser = optparse.OptionParser()
parser.add_option( '-r', '--ref', dest='ref', help='The reference genome to index and use' )
parser.add_option( '-f', '--fastq', dest='fastq', help='The fastq file to use for the mapping' )
parser.add_option( '-F', '--output', dest='output', help='The file to save the output (SAM format)' )
parser.add_option( '-A', '--space', dest='space', type="choice", default='0', choices=('0', '1'), help='The encoding space (0: base 1: color)' )
parser.add_option( '-H', '--suppressHeader', action="store_true", dest='suppressHeader', default=False, help='Suppress header' )
parser.add_option( '-n', '--numThreads', dest='numThreads', type="int", default="1", help='The number of threads to use' )
parser.add_option( '-t', '--timing', action="store_true", default=False, dest='timing', help='output timming information to stderr' )
parser.add_option( '-l', '--loadAllIndexes', action="store_true", default=False, dest='loadAllIndexes', help='Load all indexes into memory' )
parser.add_option( '-m', '--indexMask', dest='indexMask', help='String containing info on how to build custom indexes' )
parser.add_option( "-b", "--buildIndex", action="store_true", dest="buildIndex", default=False, help='String containing info on how to build custom indexes' )
parser.add_option( "--indexRepeatMasker", action="store_true", dest="indexRepeatMasker", default=False, help='Do not index lower case sequences. Such as those created by RepeatMasker' )
parser.add_option( '--indexContigOptions', dest='indexContigOptions', default="", help='The contig range options to use for the indexing' )
parser.add_option( '--indexExonsFileName', dest='indexExonsFileName', default="", help='The exons file to use for the indexing' )
parser.add_option( '-o', '--offsets', dest='offsets', default="", help='The offsets for \'match\'' )
parser.add_option( '-k', '--keySize', dest='keySize', type="int", default="-1", help='truncate key size in \'match\'' )
parser.add_option( '-K', '--maxKeyMatches', dest='maxKeyMatches', type="int", default="-1", help='the maximum number of matches to allow before a key is ignored' )
parser.add_option( '-M', '--maxNumMatches', dest='maxNumMatches', type="int", default="-1", help='the maximum number of matches to allow bfore the read is discarded' )
parser.add_option( '-w', '--whichStrand', dest='whichStrand', type="choice", default='0', choices=('0', '1', '2'), help='the strands to consider (0: both 1: forward 2: reverse)' )
parser.add_option( '--scoringMatrixFileName', dest='scoringMatrixFileName', help='Scoring Matrix file used to score the alignments' )
parser.add_option( '-u', '--ungapped', dest='ungapped', action="store_true", default=False, help='performed ungapped local alignment' )
parser.add_option( '-U', '--unconstrained', dest='unconstrained', action="store_true", default=False, help='performed local alignment without mask constraints' )
parser.add_option( '-O', '--offset', dest='offset', type="int", default="0", help='the number of bases before and after each hit to consider in local alignment' )
parser.add_option( '-q', '--avgMismatchQuality', type="int", default="-1", dest='avgMismatchQuality', help='average mismatch quality' )
parser.add_option( '-a', '--algorithm', dest='algorithm', default='0', type="choice", choices=('0', '1', '2', '3', '4'), help='post processing algorithm (0: no filtering, 1: all passing filters, 2: unique, 3: best scoring unique, 4: best score all' )
parser.add_option( '--unpaired', dest='unpaired', action="store_true", default=False, help='do not choose alignments based on pairing' )
parser.add_option( '--reverseStrand', dest='reverseStrand', action="store_true", default=False, help='paired end reads are given on reverse strands' )
parser.add_option( '--pairedEndInfer', dest='pairedEndInfer', action="store_true", default=False, help='break ties when one end of a paired end read by estimating the insert size distribution' )
parser.add_option( '--randomBest', dest='randomBest', action="store_true", default=False, help='output a random best scoring alignment' )
(options, args) = parser.parse_args()
# output version # of tool
try:
tmp = tempfile.NamedTemporaryFile().name
tmp_stdout = open( tmp, 'wb' )
proc = subprocess.Popen( args='bfast 2>&1', shell=True, stdout=tmp_stdout )
tmp_stdout.close()
returncode = proc.wait()
stdout = None
for line in open( tmp_stdout.name, 'rb' ):
if line.lower().find( 'version' ) >= 0:
stdout = line.strip()
break
if stdout:
sys.stdout.write( '%s\n' % stdout )
else:
raise Exception
except:
sys.stdout.write( 'Could not determine BFAST version\n' )
buffsize = 1048576
# make temp directory for bfast, requires trailing slash
tmp_dir = '%s/' % tempfile.mkdtemp()
# 'generic' options used in all bfast commands here
if options.timing:
all_cmd_options = "-t"
else:
all_cmd_options = ""
try:
if options.buildIndex:
reference_filepath = tempfile.NamedTemporaryFile( dir=tmp_dir, suffix='.fa' ).name
# build bfast indexes
os.symlink( options.ref, reference_filepath )
# bfast fast2brg
try:
nuc_space = [ "0" ]
if options.space == "1":
# color space localalign appears to require nuc space brg
nuc_space.append( "1" )
for space in nuc_space:
cmd = 'bfast fasta2brg -f "%s" -A "%s" %s' % ( reference_filepath, space, all_cmd_options )
tmp = tempfile.NamedTemporaryFile( dir=tmp_dir ).name
tmp_stderr = open( tmp, 'wb' )
proc = subprocess.Popen( args=cmd, shell=True, cwd=tmp_dir, stderr=tmp_stderr.fileno() )
returncode = proc.wait()
tmp_stderr.close()
# get stderr, allowing for case where it's very large
tmp_stderr = open( tmp, 'rb' )
stderr = ''
try:
while True:
stderr += tmp_stderr.read( buffsize )
if not stderr or len( stderr ) % buffsize != 0:
break
except OverflowError:
pass
tmp_stderr.close()
if returncode != 0:
raise Exception(stderr)
except Exception as e:
raise Exception('Error in \'bfast fasta2brg\'.\n' + str( e ))
# bfast index
try:
all_index_cmds = 'bfast index %s -f "%s" -A "%s" -n "%s"' % ( all_cmd_options, reference_filepath, options.space, options.numThreads )
if options.indexRepeatMasker:
all_index_cmds += " -R"
if options.indexContigOptions:
index_contig_options = [ int(_) for _ in options.indexContigOptions.split( ',' ) ]
if index_contig_options[0] >= 0:
all_index_cmds += ' -s "%s"' % index_contig_options[0]
if index_contig_options[1] >= 0:
all_index_cmds += ' -S "%s"' % index_contig_options[1]
if index_contig_options[2] >= 0:
all_index_cmds += ' -e "%s"' % index_contig_options[2]
if index_contig_options[3] >= 0:
all_index_cmds += ' -E "%s"' % index_contig_options[3]
elif options.indexExonsFileName:
all_index_cmds += ' -x "%s"' % options.indexExonsFileName
index_count = 1
for mask, hash_width in [ mask.split( ':' ) for mask in options.indexMask.split( ',' ) ]:
cmd = '%s -m "%s" -w "%s" -i "%i"' % ( all_index_cmds, mask, hash_width, index_count )
tmp = tempfile.NamedTemporaryFile( dir=tmp_dir ).name
tmp_stderr = open( tmp, 'wb' )
proc = subprocess.Popen( args=cmd, shell=True, cwd=tmp_dir, stderr=tmp_stderr.fileno() )
returncode = proc.wait()
tmp_stderr.close()
# get stderr, allowing for case where it's very large
tmp_stderr = open( tmp, 'rb' )
stderr = ''
try:
while True:
stderr += tmp_stderr.read( buffsize )
if not stderr or len( stderr ) % buffsize != 0:
break
except OverflowError:
pass
tmp_stderr.close()
if returncode != 0:
raise Exception(stderr)
index_count += 1
except Exception as e:
raise Exception('Error in \'bfast index\'.\n' + str( e ))
else:
reference_filepath = options.ref
assert reference_filepath and os.path.exists( reference_filepath ), 'A valid genome reference was not provided.'
# set up aligning and generate aligning command options
# set up temp output files
tmp_bmf = tempfile.NamedTemporaryFile( dir=tmp_dir )
tmp_bmf_name = tmp_bmf.name
tmp_bmf.close()
tmp_baf = tempfile.NamedTemporaryFile( dir=tmp_dir )
tmp_baf_name = tmp_baf.name
tmp_baf.close()
bfast_match_cmd = 'bfast match -f "%s" -r "%s" -n "%s" -A "%s" -T "%s" -w "%s" %s' % ( reference_filepath, options.fastq, options.numThreads, options.space, tmp_dir, options.whichStrand, all_cmd_options )
bfast_localalign_cmd = 'bfast localalign -f "%s" -m "%s" -n "%s" -A "%s" -o "%s" %s' % ( reference_filepath, tmp_bmf_name, options.numThreads, options.space, options.offset, all_cmd_options )
bfast_postprocess_cmd = 'bfast postprocess -O 1 -f "%s" -i "%s" -n "%s" -A "%s" -a "%s" %s' % ( reference_filepath, tmp_baf_name, options.numThreads, options.space, options.algorithm, all_cmd_options )
if options.offsets:
bfast_match_cmd += ' -o "%s"' % options.offsets
if options.keySize >= 0:
bfast_match_cmd += ' -k "%s"' % options.keySize
if options.maxKeyMatches >= 0:
bfast_match_cmd += ' -K "%s"' % options.maxKeyMatches
if options.maxNumMatches >= 0:
bfast_match_cmd += ' -M "%s"' % options.maxNumMatches
bfast_localalign_cmd += ' -M "%s"' % options.maxNumMatches
if options.scoringMatrixFileName:
bfast_localalign_cmd += ' -x "%s"' % options.scoringMatrixFileName
bfast_postprocess_cmd += ' -x "%s"' % options.scoringMatrixFileName
if options.ungapped:
bfast_localalign_cmd += ' -u'
if options.unconstrained:
bfast_localalign_cmd += ' -U'
if options.avgMismatchQuality >= 0:
bfast_localalign_cmd += ' -q "%s"' % options.avgMismatchQuality
bfast_postprocess_cmd += ' -q "%s"' % options.avgMismatchQuality
if options.algorithm == 3:
if options.pairedEndInfer:
bfast_postprocess_cmd += ' -P'
if options.randomBest:
bfast_postprocess_cmd += ' -z'
if options.unpaired:
bfast_postprocess_cmd += ' -U'
if options.reverseStrand:
bfast_postprocess_cmd += ' -R'
# instead of using temp files, should we stream through pipes?
bfast_match_cmd += " > %s" % tmp_bmf_name
bfast_localalign_cmd += " > %s" % tmp_baf_name
bfast_postprocess_cmd += " > %s" % options.output
# need to nest try-except in try-finally to handle 2.4
try:
# bfast 'match'
try:
tmp = tempfile.NamedTemporaryFile( dir=tmp_dir ).name
tmp_stderr = open( tmp, 'wb' )
proc = subprocess.Popen( args=bfast_match_cmd, shell=True, cwd=tmp_dir, stderr=tmp_stderr.fileno() )
returncode = proc.wait()
tmp_stderr.close()
# get stderr, allowing for case where it's very large
tmp_stderr = open( tmp, 'rb' )
stderr = ''
try:
while True:
stderr += tmp_stderr.read( buffsize )
if not stderr or len( stderr ) % buffsize != 0:
break
except OverflowError:
pass
tmp_stderr.close()
if returncode != 0:
raise Exception(stderr)
except Exception as e:
raise Exception('Error in \'bfast match\'. \n' + str( e ))
# bfast 'localalign'
try:
tmp = tempfile.NamedTemporaryFile( dir=tmp_dir ).name
tmp_stderr = open( tmp, 'wb' )
proc = subprocess.Popen( args=bfast_localalign_cmd, shell=True, cwd=tmp_dir, stderr=tmp_stderr.fileno() )
returncode = proc.wait()
tmp_stderr.close()
# get stderr, allowing for case where it's very large
tmp_stderr = open( tmp, 'rb' )
stderr = ''
try:
while True:
stderr += tmp_stderr.read( buffsize )
if not stderr or len( stderr ) % buffsize != 0:
break
except OverflowError:
pass
tmp_stderr.close()
if returncode != 0:
raise Exception(stderr)
except Exception as e:
raise Exception('Error in \'bfast localalign\'. \n' + str( e ))
# bfast 'postprocess'
try:
tmp = tempfile.NamedTemporaryFile( dir=tmp_dir ).name
tmp_stderr = open( tmp, 'wb' )
proc = subprocess.Popen( args=bfast_postprocess_cmd, shell=True, cwd=tmp_dir, stderr=tmp_stderr.fileno() )
returncode = proc.wait()
tmp_stderr.close()
# get stderr, allowing for case where it's very large
tmp_stderr = open( tmp, 'rb' )
stderr = ''
try:
while True:
stderr += tmp_stderr.read( buffsize )
if not stderr or len( stderr ) % buffsize != 0:
break
except OverflowError:
pass
tmp_stderr.close()
if returncode != 0:
raise Exception(stderr)
except Exception as e:
raise Exception('Error in \'bfast postprocess\'. \n' + str( e ))
# remove header if necessary
if options.suppressHeader:
tmp_out = tempfile.NamedTemporaryFile( dir=tmp_dir)
tmp_out_name = tmp_out.name
tmp_out.close()
try:
shutil.move( options.output, tmp_out_name )
except Exception as e:
raise Exception('Error moving output file before removing headers. \n' + str( e ))
fout = open( options.output, 'w' )
for line in open( tmp_out.name, 'r' ):
if len( line ) < 3 or line[0:3] not in [ '@HD', '@SQ', '@RG', '@PG', '@CO' ]:
fout.write( line )
fout.close()
# check that there are results in the output file
if os.path.getsize( options.output ) > 0:
if "0" == options.space:
sys.stdout.write( 'BFAST run on Base Space data' )
else:
sys.stdout.write( 'BFAST run on Color Space data' )
else:
raise Exception('The output file is empty. You may simply have no matches, or there may be an error with your input file or settings.')
except Exception as e:
stop_err( 'The alignment failed.\n' + str( e ) )
finally:
# clean up temp dir
if os.path.exists( tmp_dir ):
shutil.rmtree( tmp_dir )
if __name__ == "__main__":
__main__()
|
sys.stderr.write( '%s\n' % msg )
sys.exit()
|
get_teams_team_activity_detail_with_date_request_builder.go
|
package getteamsteamactivitydetailwithdate
import (
i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f "github.com/microsoft/kiota-abstractions-go"
i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91 "github.com/microsoft/kiota-abstractions-go/serialization"
)
// GetTeamsTeamActivityDetailWithDateRequestBuilder provides operations to call the getTeamsTeamActivityDetail method.
type GetTeamsTeamActivityDetailWithDateRequestBuilder struct {
// Path parameters for the request
pathParameters map[string]string
// The request adapter to use to execute the requests.
requestAdapter i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestAdapter
// Url template to use to build the URL for the current request builder
urlTemplate string
}
// GetTeamsTeamActivityDetailWithDateRequestBuilderGetRequestConfiguration configuration for the request such as headers, query parameters, and middleware options.
type GetTeamsTeamActivityDetailWithDateRequestBuilderGetRequestConfiguration struct {
// Request headers
Headers map[string]string
// Request options
Options []i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestOption
}
// NewGetTeamsTeamActivityDetailWithDateRequestBuilderInternal instantiates a new GetTeamsTeamActivityDetailWithDateRequestBuilder and sets the default values.
func NewGetTeamsTeamActivityDetailWithDateRequestBuilderInternal(pathParameters map[string]string, requestAdapter i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestAdapter, date *i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.DateOnly)(*GetTeamsTeamActivityDetailWithDateRequestBuilder)
|
// NewGetTeamsTeamActivityDetailWithDateRequestBuilder instantiates a new GetTeamsTeamActivityDetailWithDateRequestBuilder and sets the default values.
func NewGetTeamsTeamActivityDetailWithDateRequestBuilder(rawUrl string, requestAdapter i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestAdapter)(*GetTeamsTeamActivityDetailWithDateRequestBuilder) {
urlParams := make(map[string]string)
urlParams["request-raw-url"] = rawUrl
return NewGetTeamsTeamActivityDetailWithDateRequestBuilderInternal(urlParams, requestAdapter, nil)
}
// CreateGetRequestInformation invoke function getTeamsTeamActivityDetail
func (m *GetTeamsTeamActivityDetailWithDateRequestBuilder) CreateGetRequestInformation()(*i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestInformation, error) {
return m.CreateGetRequestInformationWithRequestConfiguration(nil);
}
// CreateGetRequestInformationWithRequestConfiguration invoke function getTeamsTeamActivityDetail
func (m *GetTeamsTeamActivityDetailWithDateRequestBuilder) CreateGetRequestInformationWithRequestConfiguration(requestConfiguration *GetTeamsTeamActivityDetailWithDateRequestBuilderGetRequestConfiguration)(*i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestInformation, error) {
requestInfo := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.NewRequestInformation()
requestInfo.UrlTemplate = m.urlTemplate
requestInfo.PathParameters = m.pathParameters
requestInfo.Method = i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.GET
if requestConfiguration != nil {
requestInfo.AddRequestHeaders(requestConfiguration.Headers)
requestInfo.AddRequestOptions(requestConfiguration.Options)
}
return requestInfo, nil
}
// Get invoke function getTeamsTeamActivityDetail
func (m *GetTeamsTeamActivityDetailWithDateRequestBuilder) Get()(GetTeamsTeamActivityDetailWithDateResponseable, error) {
return m.GetWithRequestConfigurationAndResponseHandler(nil, nil);
}
// GetWithRequestConfigurationAndResponseHandler invoke function getTeamsTeamActivityDetail
func (m *GetTeamsTeamActivityDetailWithDateRequestBuilder) GetWithRequestConfigurationAndResponseHandler(requestConfiguration *GetTeamsTeamActivityDetailWithDateRequestBuilderGetRequestConfiguration, responseHandler i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ResponseHandler)(GetTeamsTeamActivityDetailWithDateResponseable, error) {
requestInfo, err := m.CreateGetRequestInformationWithRequestConfiguration(requestConfiguration);
if err != nil {
return nil, err
}
res, err := m.requestAdapter.SendAsync(requestInfo, CreateGetTeamsTeamActivityDetailWithDateResponseFromDiscriminatorValue, responseHandler, nil)
if err != nil {
return nil, err
}
return res.(GetTeamsTeamActivityDetailWithDateResponseable), nil
}
|
{
m := &GetTeamsTeamActivityDetailWithDateRequestBuilder{
}
m.urlTemplate = "{+baseurl}/reports/microsoft.graph.getTeamsTeamActivityDetail(date={date})";
urlTplParams := make(map[string]string)
for idx, item := range pathParameters {
urlTplParams[idx] = item
}
if date != nil {
urlTplParams[""] = (*date).String()
}
m.pathParameters = urlTplParams;
m.requestAdapter = requestAdapter;
return m
}
|
arrays.rs
|
// Arrays - Fixed list where elements are the same data types
pub fn run()
|
{
let mut numbers: [i32; 5] = [1, 2, 3, 4, 5];
// Re-assign value
numbers[2] = 20;
println!("{:?}", numbers);
// Get single val
println!("Single Value: {}", numbers[0]);
// Get array length
println!("Array length: {}", numbers.len());
// Arrays are stack(栈) allocated(分配)
println!("Array occuopies {} bytes", std::mem::size_of_val(&numbers));
// Get Slice
let slice: &[i32] = &numbers[0..2];
println!("Slice: {:?}", slice);
}
|
|
Todo.js
|
import React from "react"
class Todo extends React.Component {
render() {
return (
<li>
<label>
<input
type="checkbox"
onChange={this.props.onClick}
checked={this.props.completed}
id={"checkbox"+this.props.key}
/>
{this.props.text}
</label>
</li>
)
}
}
|
Todo.propTypes = {
onClick : React.PropTypes.func.isRequired,
completed: React.PropTypes.bool.isRequired,
text : React.PropTypes.string.isRequired
}
export default Todo
| |
calendar_models_test.py
|
import sys
import os
from io import StringIO
from datetime import datetime
import unittest
from unittest.mock import patch
sys.path.append(os.path.abspath("./src/"))
from calendarApp.models import Event, Calendar
class CalendarModelTests(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.data1 = {
"name": "Test Event 1",
"start_time": "01/01/2000 00:00:00",
"end_time": "01/01/2001 00:00:00"
}
cls.data2 = {
"name": "Test Event 2",
"start_time": "01/01/2001 00:00:00",
"end_time": "01/01/2002 00:00:00"
}
@classmethod
def tearDownClass(cls):
del cls.data1
del cls.data2
def setUp(self):
|
def tearDown(self):
del self.calendar
def test_event_add(self):
# Test Configuration and Setup
with patch('sys.stdout', StringIO()) as print_output:
# Test Function
self.calendar.add_event(
self.data1["name"], self.data1["start_time"], self.data1["end_time"])
calendar_event = self.calendar.schedule[0]
# Test Assertions
self.assertEqual(
f"[INFO] Event {self.data1['name']} added", print_output.getvalue().rstrip())
self.assertEqual(self.data1["name"], calendar_event.name)
def test_event_delete(self):
# Test Configuration and Setup
self.calendar.schedule = [
Event(
self.data1["name"], self.data1["start_time"], self.data1["end_time"])
]
calendar_event = self.calendar.schedule[0]
with patch('sys.stdout', StringIO()) as print_output:
# Test Function
self.calendar.delete_event([str(calendar_event.id)])
# Test Assertions
self.assertEqual(
f"[INFO] Event(s) ['{calendar_event.id}'] removed", print_output.getvalue().rstrip())
self.assertFalse(self.calendar.schedule)
def test_event_order(self):
# Test Configuration and Setup
self.calendar.schedule = [
Event(
self.data2["name"], self.data2["start_time"], self.data2["end_time"]),
Event(
self.data1["name"], self.data1["start_time"], self.data1["end_time"])
]
# Test Function
self.calendar.order_events()
# Test Assertions
self.assertLess(
self.calendar.schedule[0].start_time, self.calendar.schedule[1].start_time)
def test_event_print(self):
# Test Configuration and Setup
self.calendar.schedule = [
Event(
self.data1["name"], self.data1["start_time"], self.data1["end_time"]),
Event(
self.data2["name"], self.data2["start_time"], self.data2["end_time"])
]
# Test Assertions
with patch('sys.stdout', StringIO()) as print_output:
self.calendar.print_events()
self.assertTrue(self.data1["name"] in print_output.getvalue())
self.assertTrue(self.data2["name"] in print_output.getvalue())
if __name__ == "__main__":
unittest.main()
|
self.calendar = Calendar("Test")
|
profile-user-social.component.ts
|
import { Component, OnInit } from '@angular/core';
import { Lightbox } from 'ngx-lightbox';
@Component({
selector: 'app-profile-user-social',
templateUrl: './profile-user-social.component.html'
})
export class
|
implements OnInit {
constructor(private lightbox: Lightbox) { }
ngOnInit() {
}
openLightbox(src: string): void {
this.lightbox.open([{ src, thumb: '' }], 0, { centerVertically: true, positionFromTop: 0, disableScrolling: true, wrapAround: true });
}
}
|
ProfileUserSocialComponent
|
dbtools_test.go
|
package dbtools
import (
"os"
"testing"
"github.com/stretchr/testify/require"
)
func TestCreateAndDropDB(t *testing.T)
|
{
if os.Getenv("POSTGRES_TEST") != "true" {
t.Skip("Skipping tests that requires postgres")
}
os.Setenv("ZEPTO_DB_ADAPTER", "postgres")
os.Setenv("ZEPTO_DB_HOST", "127.0.0.1")
os.Setenv("ZEPTO_DB_DATABASE", "mycustomdb")
os.Setenv("ZEPTO_DB_PORT", "15432")
os.Setenv("ZEPTO_DB_SSLMODE", "disable")
os.Setenv("ZEPTO_DB_USERNAME", "postgres")
os.Setenv("ZEPTO_DB_PASSWORD", "postgres")
dt, err := NewDBTools()
require.NoError(t, err)
require.NotNil(t, dt)
err = dt.CreateDB()
require.NoError(t, err)
err = dt.DropDB()
require.NoError(t, err)
}
|
|
network.py
|
from .e212_names import operators, countries
from .errors import InvalidNetwork, InvalidCountry
def network(mcc, mnc):
'''
Returns a tuple (country, network_name), with country specified as
ISO-3166-1 alpha-2 code.
'''
mcc = int(mcc)
mnc = int(mnc)
try:
return operators[mcc][mnc]
except:
raise InvalidNetwork('Invalid MCC {} MNC {}'.format(mcc, mnc))
def
|
(country):
'''
Returns a list of tuples (mcc, mnc, network_name) with all the networks
belonging to the specified country.
The country must be specified as an ISO-3166-1 alpha-2 code.
'''
try:
return [(m[0], m[1], operators[m[0]][m[1]][1])
for m in countries[country]]
except:
raise InvalidCountry('Invalid country {}'.format(country))
|
country_networks
|
transformer.go
|
package ports
import (
"fleet-formation/pkg/mission/service"
proto "github.com/Tomofiles/skysign_cloud_v2/skysign-proto/pkg/skysign_proto"
)
// MissionProtoTransformerFromModel .
func MissionProtoTransformerFromModel(
model service.MissionPresentationModel,
) *proto.Mission
|
{
mission := &proto.Mission{}
mission.Id = model.GetMission().GetID()
mission.Name = model.GetMission().GetName()
waypoints := []*proto.Waypoint{}
for _, w := range model.GetMission().GetNavigation().GetWaypoints() {
waypoints = append(
waypoints,
&proto.Waypoint{
Latitude: w.GetLatitude(),
Longitude: w.GetLongitude(),
RelativeHeight: w.GetRelativeHeight(),
Speed: w.GetSpeed(),
},
)
}
mission.Navigation = &proto.Navigation{
TakeoffPointGroundHeight: model.GetMission().GetNavigation().GetTakeoffPointGroundHeight(),
Waypoints: waypoints,
UploadId: model.GetMission().GetNavigation().GetUploadID(),
}
return mission
}
|
|
mdlp.py
|
"""
Minimum Description Length Principle (MDLP) binning
- Original paper: http://sci2s.ugr.es/keel/pdf/algorithm/congreso/fayyad1993.pdf
- Implementation inspiration: https://www.ibm.com/support/knowledgecenter/it/SSLVMB_21.0.0/com.ibm.spss.statistics.help/alg_optimal-binning.htm
"""
import collections
import math
import numpy as np
from scipy import stats
from sklearn.utils import check_X_y
from .base import BaseSupervisedBinner
class
|
(BaseSupervisedBinner):
def fit(self, X, y, **fit_params):
"""Determine which are the best cut points for each column in X based on y."""
X, y = check_X_y(X, y, y_numeric=True)
self.cut_points_ = [mdlp_cut(x, y, []) for x in X.T]
return self
@property
def cut_points(self):
return self.cut_points_
def calc_class_entropy(y):
class_counts = np.unique(y, return_counts=True)[1]
return stats.entropy(class_counts, base=2)
def calc_class_information_entropy(x, y, cut_point):
partition = x <= cut_point
y_1 = y[partition]
y_2 = y[~partition]
ent_1 = calc_class_entropy(y_1)
ent_2 = calc_class_entropy(y_2)
return (y_1.size * ent_1 + y_2.size * ent_2) / (y_1.size + y_2.size)
def mdlp_cut(x, y, cut_points):
# No cut is necessary if there is only one class
if len(np.unique(y)) == 1:
return
# Calculate the current entropy
y_ent = calc_class_entropy(y)
# Sort x and y according to x
sorted_indexes = x.argsort()
x = x[sorted_indexes]
y = y[sorted_indexes]
# Find the potential cut points
potential_cut_points = []
for i in range(x.size - 1):
potential_cut_points.append((x[i] + x[i+1]) / 2)
# Ignore the cut points that appear more than once
potential_cut_points = list(set(potential_cut_points))
# Find the cut point with gives the lowest class information entropy
cut_point = min(
potential_cut_points,
key=lambda cut_point: calc_class_information_entropy(x, y, cut_point)
)
# Calculate the information gain obtained with the obtained cut point
new_ent = calc_class_information_entropy(x, y, cut_point)
gain = y_ent - new_ent
# Partition the data
partition = x <= cut_point
x_1 = x[partition]
y_1 = y[partition]
x_2 = x[~partition]
y_2 = y[~partition]
# Get the number of unique classes in each group
k = len(np.unique(y))
k_1 = len(np.unique(y_1))
k_2 = len(np.unique(y_2))
# Calculate the entropy of each group
y_1_ent = calc_class_entropy(y_1)
y_2_ent = calc_class_entropy(y_2)
# Calculate the acceptance criterion
delta = math.log2(3 ** k) - k * y_ent + k_1 * y_1_ent + k_2 * y_2_ent
n = y.size
acceptance_criterion = (math.log2(n - 1) + delta) / n
# Add the cut point if the gain is higher than the acceptance criterion
if gain > acceptance_criterion:
cut_points.append(cut_point)
# Recursively check if further cuts are possible
mdlp_cut(x_1, y_1, cut_points)
mdlp_cut(x_2, y_2, cut_points)
return sorted(cut_points)
|
MDLPBinner
|
CardAccountDetails.tsx
|
import React, { Fragment } from 'react';
import { useIntl, connect } from 'umi';
import { Table, Card } from 'antd';
import { cardProps } from '@/pages/onnet-portal/core/utils/props';
import styles from '@/pages/onnet-portal/core/style.less';
const CardAccountDetails = (props) => {
const { lb_account = { data: {} } } = props;
const { formatMessage } = useIntl();
if (!lb_account.data.account_info) {
return null;
}
const tableData = [
{
key: 'CardAccountDetailsRowKey2',
name: 'Contact person',
value: lb_account.data.account_info.kont_person,
|
{
key: 'CardAccountDetailsRowKey3',
name: 'Email',
value: lb_account.data.account_info.emails ? (
<Fragment>
{lb_account.data.account_info.emails.map((email) => (
<Fragment key={`fragmentkey${email.replace(/[^A-Za-z0-9]/g, '')}`}>
<span key={`spankey${email.replace(/[^A-Za-z0-9]/g, '')}`}>{email} </span>
</Fragment>
))}
</Fragment>
) : null,
},
{
key: 'CardAccountDetailsRowKey4',
name: 'Phone',
value: lb_account.data.account_info.phones ? (
<Fragment>
{lb_account.data.account_info.phones.map((phone) => (
<Fragment key={`fragmentkey${phone.replace(/[^A-Za-z0-9]/g, '')}`}>
<span key={`spankey${phone.replace(/[^A-Za-z0-9]/g, '')}`}>{phone} </span>
</Fragment>
))}
</Fragment>
) : null,
},
];
const columns = [
{
title: 'Name',
dataIndex: 'name',
key: 'name',
width: '35%',
render: (text, row, index) => {
if (index === 0) {
return {
children: <span key={`name${index}`}>{text}</span>,
props: {
colSpan: 2,
},
};
}
return <span key={`name${index}`}>{text}</span>;
},
},
{
title: 'Value',
dataIndex: 'value',
key: 'value',
render: (text, row, index) => {
if (index === 0) {
return {
children: <a key={`value${index}`}>{text}</a>,
props: {
colSpan: 0,
},
};
}
return <a key={`value${index}`}>{text}</a>;
},
},
];
return (
<Card className={styles.card} {...cardProps}>
<Card.Meta
title={formatMessage({
id: 'reseller_portal.account_details',
defaultMessage: 'Account details',
})}
description={
<Table
dataSource={tableData}
columns={columns}
pagination={false}
showHeader={false}
size="small"
/>
}
/>
</Card>
);
};
export default connect(({ lb_account }) => ({
lb_account,
}))(CardAccountDetails);
|
},
|
simpleupload.js
|
/**
* @description
* 简单上传:点击按钮,直接选择文件上传
* @author Jinqn
* @date 2014-03-31
*/
UE.plugin.register('simpleupload', function (){
var me = this,
isLoaded = false,
containerBtn;
function initUploadBtn(){
var w = containerBtn.offsetWidth || 20,
h = containerBtn.offsetHeight || 20,
btnIframe = document.createElement('iframe'),
btnStyle = 'display:block;width:' + w + 'px;height:' + h + 'px;overflow:hidden;border:0;margin:0;padding:0;position:absolute;top:0;left:0;filter:alpha(opacity=0);-moz-opacity:0;-khtml-opacity: 0;opacity: 0;cursor:pointer;';
domUtils.on(btnIframe, 'load', function(){
var timestrap = (+new Date()).toString(36),
wrapper,
btnIframeDoc,
btnIframeBody;
btnIframeDoc = (btnIframe.contentDocument || btnIframe.contentWindow.document);
btnIframeBody = btnIframeDoc.body;
wrapper = btnIframeDoc.createElement('div');
wrapper.innerHTML = '<form id="edui_form_' + timestrap + '" target="edui_iframe_' + timestrap + '" method="POST" enctype="multipart/form-data" action="' + me.getOpt('serverUrl') + '" ' +
'style="' + btnStyle + '">' +
'<input id="edui_input_' + timestrap + '" type="file" accept="image/*" name="' + me.options.imageFieldName + '" ' +
'style="' + btnStyle + '">' +
'</form>' +
'<iframe id="edui_iframe_' + timestrap + '" name="edui_iframe_' + timestrap + '" style="display:none;width:0;height:0;border:0;margin:0;padding:0;position:absolute;"></iframe>';
wrapper.className = 'edui-' + me.options.theme;
wrapper.id = me.ui.id + '_iframeupload';
btnIframeBody.style.cssText = btnStyle;
btnIframeBody.style.width = w + 'px';
btnIframeBody.style.height = h + 'px';
btnIframeBody.appendChild(wrapper);
if (btnIframeBody.parentNode) {
btnIframeBody.parentNode.style.width = w + 'px';
btnIframeBody.parentNode.style.height = w + 'px';
}
var form = btnIframeDoc.getElementById('edui_form_' + timestrap);
var input = btnIframeDoc.getElementById('edui_input_' + timestrap);
var iframe = btnIframeDoc.getElementById('edui_iframe_' + timestrap);
domUtils.on(input, 'change', function(){
if(!input.value) return;
var loadingId = 'loading_' + (+new Date()).toString(36);
var params = utils.serializeParam(me.queryCommandValue('serverparam')) || '';
var imageActionUrl = me.getActionUrl(me.getOpt('imageActionName'));
var allowFiles = me.getOpt('imageAllowFiles');
me.focus();
me.execCommand('inserthtml', '<img class="loadingclass" id="' + loadingId + '" src="' + me.options.themePath + me.options.theme +'/images/spacer.gif" title="' + (me.getLang('simpleupload.loading') || '') + '" >');
function callback(){
|
var link, json, loader,
body = (iframe.contentDocument || iframe.contentWindow.document).body,
result = body.innerText || body.textContent || '';
json = (new Function("return " + result))();
link = me.options.imageUrlPrefix + json.url;
if(json.state == 'SUCCESS' && json.url) {
loader = me.document.getElementById(loadingId);
loader.setAttribute('src', link);
loader.setAttribute('_src', link);
loader.setAttribute('title', json.title || '');
loader.setAttribute('alt', json.original || '');
loader.removeAttribute('id');
domUtils.removeClasses(loader, 'loadingclass');
} else {
showErrorLoader && showErrorLoader(json.state);
}
}catch(er){
showErrorLoader && showErrorLoader(me.getLang('simpleupload.loadError'));
}
form.reset();
domUtils.un(iframe, 'load', callback);
}
function showErrorLoader(title){
if(loadingId) {
var loader = me.document.getElementById(loadingId);
loader && domUtils.remove(loader);
me.fireEvent('showmessage', {
'id': loadingId,
'content': title,
'type': 'error',
'timeout': 4000
});
}
}
/* 判断后端配置是否没有加载成功 */
if (!me.getOpt('imageActionName')) {
errorHandler(me.getLang('autoupload.errorLoadConfig'));
return;
}
// 判断文件格式是否错误
var filename = input.value,
fileext = filename ? filename.substr(filename.lastIndexOf('.')):'';
if (!fileext || (allowFiles && (allowFiles.join('') + '.').indexOf(fileext.toLowerCase() + '.') == -1)) {
showErrorLoader(me.getLang('simpleupload.exceedTypeError'));
return;
}
domUtils.on(iframe, 'load', callback);
form.action = utils.formatUrl(imageActionUrl + (imageActionUrl.indexOf('?') == -1 ? '?':'&') + params);
form.submit();
});
var stateTimer;
me.addListener('selectionchange', function () {
clearTimeout(stateTimer);
stateTimer = setTimeout(function() {
var state = me.queryCommandState('simpleupload');
if (state == -1) {
input.disabled = 'disabled';
} else {
input.disabled = false;
}
}, 400);
});
isLoaded = true;
});
btnIframe.style.cssText = btnStyle;
containerBtn.appendChild(btnIframe);
}
return {
bindEvents:{
'ready': function() {
//设置loading的样式
utils.cssRule('loading',
'.loadingclass{display:inline-block;cursor:default;background: url(\''
+ this.options.themePath
+ this.options.theme +'/images/loading.gif\') no-repeat center center transparent;border:1px solid #cccccc;margin-right:1px;height: 22px;width: 22px;}\n' +
'.loaderrorclass{display:inline-block;cursor:default;background: url(\''
+ this.options.themePath
+ this.options.theme +'/images/loaderror.png\') no-repeat center center transparent;border:1px solid #cccccc;margin-right:1px;height: 22px;width: 22px;' +
'}',
this.document);
},
/* 初始化简单上传按钮 */
'simpleuploadbtnready': function(type, container) {
containerBtn = container;
me.afterConfigReady(initUploadBtn);
}
},
outputRule: function(root){
utils.each(root.getNodesByTagName('img'),function(n){
if (/\b(loaderrorclass)|(bloaderrorclass)\b/.test(n.getAttr('class'))) {
n.parentNode.removeChild(n);
}
});
},
commands: {
'simpleupload': {
queryCommandState: function () {
return isLoaded ? 0:-1;
}
}
}
}
});
|
try{
|
main.py
|
#!/usr/bin/env python3
import argparse
import json
import logging
import multiprocessing
import shutil
import time
from conflation import aggregation, util
from conflation.map_matching import valhalla
from conflation.trace_fetching import mapillary, mapillary_v3, auth_server
def
|
():
arg_parser = argparse.ArgumentParser()
# TODO: Make this optional and do the planet if so?
arg_parser.add_argument(
"--bbox",
type=str,
help="Filter by the bounding box on the map, given as `min_longitude,min_latitude,max_longitude,max_latitude`",
required=True,
)
arg_parser.add_argument(
"--trace-config",
type=str,
help="JSON of configurable settings for where / how to pull the GPS trace. See .README for specific fields.",
required=True,
)
arg_parser.add_argument(
"--map-matching-config",
type=str,
help="JSON of configurable settings for where / how to perform map matching. See .README for specific fields.",
required=True,
)
arg_parser.add_argument(
"--concurrency",
type=int,
help="The number of processes to use to make requests, by default your # of cpus",
default=multiprocessing.cpu_count(),
)
arg_parser.add_argument(
"--logging",
type=str,
help='The logging level from ["debug", "info", "warning", "error", "critical"], by default "info"',
default="info",
)
# Record start time for tracking runtimes
start = time.time()
parsed_args = arg_parser.parse_args()
# Create dirs
bbox = parsed_args.bbox
traces_dir, tmp_dir, map_matches_dir, results_dir, log_filename = util.initialize_dirs(
bbox
)
# Set up logging (we do this after creating dirs so we can put the logs in a file under the dir we created)
logging.basicConfig(
level=getattr(logging, parsed_args.logging.upper(), None),
format="%(asctime)s.%(msecs)03d [%(levelname)s] %(message)s",
datefmt="%Y/%m/%d %H:%M:%S",
handlers=[logging.FileHandler(log_filename, mode="w"), logging.StreamHandler()],
)
logging.info("Pulling trace data from API...")
# Determine source of trace data specified by config
try:
trace_config = json.loads(parsed_args.trace_config)
except json.decoder.JSONDecodeError:
logging.critical(
"Could not parse --trace-config JSON={}".format(parsed_args.trace_config)
)
raise
# Pull and filter trace data
if trace_config["provider"] == "mapillary":
# Do a quick check to see if user specified the mandatory 'client_id' and 'client_secret' in config JSON
if "client_id" not in trace_config:
raise KeyError(
'Missing "client_id" (Mapillary Client ID) key in --trace-config JSON.'
)
if "client_secret" not in trace_config:
raise KeyError(
'Missing "client_secret" (Mapillary Client ID) key in --trace-config JSON.'
)
access_token = auth_server.run(
trace_config["client_id"], trace_config["client_secret"]
)
# Puts a small delay here to address a problem with Mapillary not registering the access_token immediately after
# distributing it
time.sleep(2)
mapillary.run(
parsed_args.bbox,
traces_dir,
tmp_dir,
trace_config,
parsed_args.concurrency,
access_token,
)
elif trace_config["provider"] == "mapillary_v3":
mapillary_v3.run(
parsed_args.bbox, traces_dir, tmp_dir, trace_config, parsed_args.concurrency
)
else:
raise NotImplementedError(
'Trace data source "{}" not supported. Currently supported: ["mapillary", "mapillary_v3"]'.format(
trace_config["provider"]
)
)
logging.info("Trace data pulled, map matching...")
# Determine source of map matching specified by config
try:
map_matching_config = json.loads(parsed_args.map_matching_config)
except json.decoder.JSONDecodeError:
logging.critical(
"Could not parse --map-matching-config JSON={}".format(
parsed_args.map_matching_config
)
)
raise
if map_matching_config["provider"] == "valhalla":
valhalla.run(traces_dir, map_matches_dir, parsed_args.concurrency, map_matching_config)
else:
raise NotImplementedError(
'Map matching source "{}" not supported. Currently supported: ["valhalla"]'.format(
map_matching_config["provider"]
)
)
logging.info("Map matching complete, aggregating data into final .json output files...")
aggregation.run(map_matches_dir, results_dir)
# Delete the tmp dir since we are finished with the run
shutil.rmtree(tmp_dir)
# Print out the time elapsed for this entire run
end = time.time()
logging.info("Script finished run in {} seconds.".format(round(end - start, 4)))
if __name__ == "__main__":
main()
|
main
|
index.ts
|
export * from './base';
export { AvlMap } from './map';
|
export { AvlSet } from './set';
|
|
const.go
|
package config
const (
// ApplicationName stores application name
ApplicationName = "Anonymous plugin"
// Repository stores repository
|
CommandTrigger = "anonymous"
// APIPath stores api prefix
APIPath = "/api/v1"
)
|
Repository = "mattermost-plugin-anonymous"
// CommandTrigger stores command trigger word
|
doc.py
|
# Copyright (C) 2007 Philipp Gortan <[email protected]>
# Copyright (C) 2009 Dr. Ralf Schlatterbeck Open Source Consulting.
# Reichergasse 131, A-3411 Weidling.
# Web: http://www.runtux.com Email: [email protected]
# All rights reserved
# ****************************************************************************
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
# ****************************************************************************
#
#++
# Name
# doc
#
# Purpose
# Detectors for document class
#--
from roundup.exceptions import Reject
from roundup.cgi.TranslationService import get_translation
import common
import re
name_txt = "[0-9a-zA-Z/]+"
num_txt = "[0-9]{2}"
ref_txt = "[-0-9a-zA-Z/]+" # allow '-' for reference name
ref_re = re.compile ("^%s$" % ref_txt)
name_re = re.compile ("^%s$" % name_txt)
num_re = re.compile (num_txt)
doc_nr_re = re.compile ("^(%s-)+? (?P<suffix> [0-9]+ )$" % name_txt, re.X)
def check_document_required (db, cl, nodeid, newvalues) :
req = ['product_type', 'reference', 'artefact', 'doc_category', 'title']
if nodeid :
req.append ('document_nr')
req.append ('responsible')
common.require_attributes (_, cl, nodeid, newvalues, * req)
# end def check_document_required
def check_document_frozen (db, cl, nodeid, newvalues) :
if common.user_has_role (db, db.getuid (), 'Doc_Admin') :
return
action = _ ('modify')
if nodeid :
attr_lst = ('product_type', 'reference', 'artefact', 'doc_category')
if db.getuid () == '1' :
attr_lst = ('product_type', 'reference', 'artefact')
else :
attr_lst = ('document_nr',)
action = _ ('specify')
attrs = ", ".join (_ (a) for a in attr_lst if a in newvalues)
if attrs :
raise Reject, _ \
('You are not allowed to %(action)s: %(attrs)s'
% locals ()
)
# end def check_document_frozen
def check_document_nr (db, cl, nodeid, newvalues) :
"""Check or calculate document number."""
doc_nr = newvalues.get ('document_nr')
if doc_nr :
if not doc_nr_re.match (doc_nr) :
raise Reject (_ ('Document number is not valid: "%s"') % doc_nr)
elif not nodeid :
### Creation where no `document_nr` is given
prefix = "-".join \
( ( db.product_type.get (newvalues ['product_type'], 'name')
, db.reference.get (newvalues ['reference'] , 'name')
, db.artefact.get (newvalues ['artefact'] , 'name')
, _cat_doc_nr (db, cl, nodeid, newvalues)
, "" # for the trailing dash
)
)
next_nr = _next_document_nr (db, cl, prefix)
newvalues ['document_nr'] = "%s%03d" % (prefix, next_nr)
# end def check_document_nr
def _check_for_description (db, cl, nodeid, newvalues) :
"""Checks that `description` is given and unique."""
common.require_attributes (_, cl, nodeid, newvalues, 'description')
if 'description' in newvalues :
desc = newvalues ['description']
common.check_unique (_, cl, nodeid, description = desc)
# end def _check_for_description
check_product_type = _check_for_description
check_reference = _check_for_description
def get_wip (db) :
wip = None
for k in ('work in progress', 'Work in progress') :
try :
wip = db.doc_status.lookup (k)
except KeyError :
pass
if wip is None :
wip = '1'
return wip
# end def get_wip
def defaults (db, cl, nodeid, newvalues) :
if not newvalues.get ('responsible', None) :
newvalues ['responsible'] = db.getuid ()
# new doc item: always set status to work in progress
newvalues ['status'] = get_wip (db)
newvalues ['state_changed_by'] = db.getuid ()
# end def defaults
def _cat_doc_nr (db, cl, nodeid, newvalues) :
"""Return the selected doc_category's `doc_num`
"""
res = db.doc_category.get (newvalues ['doc_category'], 'doc_num')
assert res
return res
# end def _cat_doc_nr
def _next_document_nr (db, cl, prefix) :
filterspec = dict (document_nr = prefix)
res = db.doc.filter (None, filterspec, sort = ('-', 'document_nr'))
if res :
doc_nr = cl.get (res [0], "document_nr")
nr = int (doc_nr_re.match (doc_nr).group ("suffix"), 10)
return nr + 1
else :
return 1
# end def _next_document_nr
def check_name \
(db, cl, nodeid, newvalues, name = 'name', regex = name_re, txt = name_txt) :
if name not in newvalues or not newvalues [name] :
return
if not regex.match (newvalues [name]) :
raise Reject, _ ('Malformed %(attr)s: Only %(name)s allowed') \
% dict (attr = _ (name), name = txt)
# end def check_name
def check_doc_category (db, cl, nodeid, newvalues) :
common.require_attributes (_, cl, nodeid, newvalues, 'doc_num')
if 'valid' not in newvalues :
newvalues ['valid'] = True
check_name (db, cl, nodeid, newvalues, 'doc_num', num_re, num_txt)
# end def check_doc_category
def check_reference (db, cl, nodeid, newvalues) :
return check_name (db, cl, nodeid, newvalues, regex = ref_re, txt = ref_txt)
# end def check_reference
def check_statechange (db, cl, nodeid, newvalues) :
""" Things to do for a state change:
Add doc admins to nosy for certain state changes
"""
if 'status' not in newvalues :
return
oldstate = cl.get (nodeid, 'status')
newstate = newvalues ['status']
wip = get_wip (db)
if newstate != oldstate and oldstate != wip :
nosy = newvalues.get ('nosy', cl.get (nodeid, 'nosy'))
if not nosy :
nosy = [db.getuid ()]
nosy = dict.fromkeys (nosy)
for u in db.user.getnodeids () :
if common.user_has_role (db, u, 'Doc_Admin') :
nosy [u] = True
newvalues ['nosy'] = nosy.keys ()
if newstate != oldstate :
newvalues ['state_changed_by'] = db.getuid ()
st = db.doc_status.getnode (newstate)
if st.rq_link :
common.require_attributes (_, cl, nodeid, newvalues, 'link')
# end def check_statechange
def init (db) :
|
### __END__ doc
|
if 'doc' not in db.classes :
return
global _
_ = get_translation \
(db.config.TRACKER_LANGUAGE, db.config.TRACKER_HOME).gettext
for action in ('create', 'set') :
db.doc.audit (action, check_document_required, priority = 110)
db.doc.audit (action, check_document_frozen, priority = 120)
db.doc.audit (action, check_document_nr, priority = 130)
db.product_type.audit (action, check_product_type)
db.reference.audit (action, check_reference)
for cl in (db.product_type, db.artefact) :
cl.audit (action, check_name)
db.reference.audit (action, check_reference)
db.doc_category.audit (action, check_doc_category)
db.doc.audit ('create', defaults, 140)
db.doc.audit ('set', check_statechange)
|
replica.go
|
package rest
import (
"fmt"
"net/http"
"strconv"
"sync"
"time"
"github.com/gorilla/mux"
"github.com/openebs/jiva/types"
"github.com/prometheus/client_golang/prometheus"
"github.com/rancher/go-rancher/api"
"github.com/rancher/go-rancher/client"
"github.com/sirupsen/logrus"
)
var (
// OpenEBSJivaRegestrationRequestDuration gets the response time of the
// requested api.
OpenEBSJivaRegestrationRequestDuration = prometheus.NewHistogramVec(
prometheus.HistogramOpts{
Name: "openebs_jiva_registration_request_duration_seconds",
Help: "Request response time of the /v1/register to register replicas.",
Buckets: []float64{0.005, 0.01, 0.025, 0.05, 0.075, 0.1, 0.5, 1, 2.5, 5, 10},
},
// code is http code and method is http method returned by
// endpoint "/v1/volume"
[]string{"code", "method"},
)
// OpenEBSJivaRegestrationRequestCounter Count the no of request Since a request has been made on /v1/volume
OpenEBSJivaRegestrationRequestCounter = prometheus.NewCounterVec(
prometheus.CounterOpts{
Name: "openebs_jiva_registration_requests_total",
Help: "Total number of /v1/register requests to register replicas.",
},
[]string{"code", "method"},
)
prometheusLock sync.Mutex
)
// init registers Prometheus metrics.It's good to register these varibles here
// otherwise you need to register it before you are going to use it. So you will
// have to register it everytime unnecessarily, instead initialize it once and
// use anywhere at anytime through the code.
func init()
|
func (s *Server) ListReplicas(rw http.ResponseWriter, req *http.Request) error {
apiContext := api.GetApiContext(req)
resp := client.GenericCollection{}
s.c.Lock()
for _, r := range s.c.ListReplicas() {
resp.Data = append(resp.Data, NewReplica(apiContext, r))
}
s.c.Unlock()
resp.ResourceType = "replica"
resp.CreateTypes = map[string]string{
"replica": apiContext.UrlBuilder.Collection("replica"),
}
apiContext.Write(&resp)
return nil
}
func (s *Server) GetReplica(rw http.ResponseWriter, req *http.Request) error {
apiContext := api.GetApiContext(req)
vars := mux.Vars(req)
id, err := DencodeID(vars["id"])
if err != nil {
logrus.Errorf("Get Replica decodeid %v failed %v", id, err)
rw.WriteHeader(http.StatusNotFound)
return nil
}
r := s.getReplica(apiContext, id)
if r == nil {
logrus.Errorf("Get Replica failed for id %v", id)
rw.WriteHeader(http.StatusNotFound)
return nil
}
apiContext.Write(r)
return nil
}
func (s *Server) RegisterReplica(rw http.ResponseWriter, req *http.Request) error {
var (
regReplica RegReplica
localRevCount int64
code int
RequestDuration *prometheus.HistogramVec
RequestCounter *prometheus.CounterVec
)
start := time.Now()
apiContext := api.GetApiContext(req)
if err := apiContext.Read(®Replica); err != nil {
logrus.Errorf("read in RegReplica failed %v", err)
return err
}
localRevCount, _ = strconv.ParseInt(regReplica.RevCount, 10, 64)
local := types.RegReplica{
Address: regReplica.Address,
RevCount: localRevCount,
RepType: regReplica.RepType,
UpTime: regReplica.UpTime,
RepState: regReplica.RepState,
}
code = http.StatusOK
rw.WriteHeader(code)
defer func() {
RequestDuration = OpenEBSJivaRegestrationRequestDuration
RequestCounter = OpenEBSJivaRegestrationRequestCounter
prometheusLock.Lock()
// This will Display the metrics something similar to
// the examples given below
// exp: openebs_jiva_registration_request_duration_seconds{code="200", method="POST"}
RequestDuration.WithLabelValues(strconv.Itoa(code), req.Method).Observe(time.Since(start).Seconds())
// This will Display the metrics something similar to
// the examples given below
// exp: openebs_jiva_registration_requests_total{code="200", method="POST"}
RequestCounter.WithLabelValues(strconv.Itoa(code), req.Method).Inc()
prometheusLock.Unlock()
}()
return s.c.RegisterReplica(local)
}
func (s *Server) CreateReplica(rw http.ResponseWriter, req *http.Request) error {
var replica Replica
apiContext := api.GetApiContext(req)
if err := apiContext.Read(&replica); err != nil {
logrus.Errorf("read in createReplica failed %v", err)
return err
}
logrus.Infof("Create Replica for address %v", replica.Address)
if err := s.c.AddReplica(replica.Address); err != nil {
return err
}
r := s.getReplica(apiContext, replica.Address)
if r == nil {
logrus.Errorf("createReplica failed for id %v", replica.Address)
return fmt.Errorf("createReplica failed while getting it")
}
apiContext.Write(r)
return nil
}
func (s *Server) CreateQuorumReplica(rw http.ResponseWriter, req *http.Request) error {
var replica Replica
apiContext := api.GetApiContext(req)
if err := apiContext.Read(&replica); err != nil {
logrus.Errorf("read in createQuorumReplica failed %v", err)
return err
}
logrus.Infof("Create QuorumReplica for address %v", replica.Address)
if err := s.c.AddQuorumReplica(replica.Address); err != nil {
return err
}
r := s.getQuorumReplica(apiContext, replica.Address)
if r == nil {
logrus.Errorf("createQuorumReplica failed for id %v", replica.Address)
return fmt.Errorf("createQuorumReplica failed while getting it")
}
apiContext.Write(r)
return nil
}
func (s *Server) getReplica(context *api.ApiContext, id string) *Replica {
s.c.Lock()
defer s.c.Unlock()
for _, r := range s.c.ListReplicas() {
if r.Address == id {
return NewReplica(context, r)
}
}
return nil
}
func (s *Server) getQuorumReplica(context *api.ApiContext, id string) *Replica {
s.c.Lock()
defer s.c.Unlock()
for _, r := range s.c.ListQuorumReplicas() {
if r.Address == id {
return NewReplica(context, r)
}
}
return nil
}
func (s *Server) DeleteReplica(rw http.ResponseWriter, req *http.Request) error {
vars := mux.Vars(req)
id, err := DencodeID(vars["id"])
if err != nil {
logrus.Errorf("Getting ID in DeleteReplica failed %v", err)
rw.WriteHeader(http.StatusNotFound)
return nil
}
logrus.Infof("Delete Replica for id %v", id)
return s.c.RemoveReplica(id)
}
func (s *Server) UpdateReplica(rw http.ResponseWriter, req *http.Request) error {
vars := mux.Vars(req)
id, err := DencodeID(vars["id"])
if err != nil {
logrus.Errorf("Getting ID in UpdateReplica failed %v", err)
rw.WriteHeader(http.StatusNotFound)
return nil
}
logrus.Infof("Update Replica for id %v", id)
var replica Replica
apiContext := api.GetApiContext(req)
apiContext.Read(&replica)
if err := s.c.SetReplicaMode(id, types.Mode(replica.Mode)); err != nil {
return err
}
return s.GetReplica(rw, req)
}
func (s *Server) PrepareRebuildReplica(rw http.ResponseWriter, req *http.Request) error {
vars := mux.Vars(req)
id, err := DencodeID(vars["id"])
if err != nil {
logrus.Errorf("Getting ID in PrepareRebuildReplica failed %v", err)
rw.WriteHeader(http.StatusNotFound)
return nil
}
logrus.Infof("Prepare Rebuild Replica for id %v", id)
disks, err := s.c.PrepareRebuildReplica(id)
if err != nil {
logrus.Errorf("Prepare Rebuild Replica failed %v for id %v", err, id)
return err
}
apiContext := api.GetApiContext(req)
resp := &PrepareRebuildOutput{
Resource: client.Resource{
Id: id,
Type: "prepareRebuildOutput",
},
Disks: disks,
}
apiContext.Write(&resp)
return nil
}
func (s *Server) VerifyRebuildReplica(rw http.ResponseWriter, req *http.Request) error {
vars := mux.Vars(req)
id, err := DencodeID(vars["id"])
if err != nil {
logrus.Errorf("Error %v in getting id while verifyrebuildreplica", err)
rw.WriteHeader(http.StatusNotFound)
return nil
}
logrus.Infof("Verify Rebuild Replica for id %v", id)
if err := s.c.VerifyRebuildReplica(id); err != nil {
logrus.Errorf("Err %v in verifyrebuildreplica for id %v", err, id)
return err
}
return s.GetReplica(rw, req)
}
|
{
prometheus.MustRegister(OpenEBSJivaRegestrationRequestDuration)
prometheus.MustRegister(OpenEBSJivaRegestrationRequestCounter)
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.