bench_test.go
// Copyright Istio Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package xds
import (
"bytes"
"fmt"
"path"
"testing"
"text/template"
"time"
"github.com/Masterminds/sprig/v3"
discovery "github.com/envoyproxy/go-control-plane/envoy/service/discovery/v3"
"github.com/golang/protobuf/jsonpb"
"github.com/golang/protobuf/ptypes/any"
"k8s.io/client-go/kubernetes/fake"
networking "istio.io/api/networking/v1alpha3"
"istio.io/istio/pilot/pkg/config/kube/crd"
"istio.io/istio/pilot/pkg/model"
"istio.io/istio/pilot/pkg/networking/util"
kubesecrets "istio.io/istio/pilot/pkg/secrets/kube"
v3 "istio.io/istio/pilot/pkg/xds/v3"
"istio.io/istio/pilot/test/xdstest"
"istio.io/istio/pkg/config"
"istio.io/istio/pkg/config/schema/collections"
"istio.io/istio/pkg/spiffe"
"istio.io/pkg/env"
"istio.io/pkg/log"
)
// ConfigInput defines inputs passed to the test config templates
// This allows tests to, for example, create a virtual service for each service.
type ConfigInput struct {
// Name of the test
Name string
// Name of the test config file to use. If not set, <Name> is used
ConfigName string
// Number of services to make
Services int
// Type of proxy to generate configs for
ProxyType model.NodeType
}
var testCases = []ConfigInput{
{
// Gateways provides an example config for a large Ingress deployment. This will create N
// virtual services and gateways, where routing is determined by hostname, meaning we generate N routes for HTTPS.
Name: "gateways",
Services: 1000,
ProxyType: model.Router,
},
{
// Gateways-shared provides an example config for a large Ingress deployment. This will create N
// virtual services and gateways, where routing is determined by path. This means there will be a single large route.
Name: "gateways-shared",
Services: 1000,
ProxyType: model.Router,
},
{
Name: "empty",
Services: 100,
ProxyType: model.SidecarProxy,
},
{
Name: "tls",
Services: 100,
},
{
Name: "telemetry",
Services: 100,
},
{
Name: "virtualservice",
Services: 100,
},
}
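// disableLogging silences all log scopes except the benchmark scope, so that logging overhead does not skew benchmark results.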
func disableLogging() {
for _, s := range log.Scopes() {
if s.Name() == benchmarkScope.Name() {
continue
}
s.SetOutputLevel(log.NoneLevel)
}
}
func BenchmarkInitPushContext(b *testing.B) {
disableLogging()
for _, tt := range testCases {
b.Run(tt.Name, func(b *testing.B) {
s, proxy := setupTest(b, tt)
b.ResetTimer()
for n := 0; n < b.N; n++ {
initPushContext(s.Env(), proxy)
}
})
}
}
func BenchmarkRouteGeneration(b *testing.B) {
disableLogging()
for _, tt := range testCases {
b.Run(tt.Name, func(b *testing.B) {
s, proxy := setupAndInitializeTest(b, tt)
// To determine which routes to generate, first gen listeners once (not part of benchmark) and extract routes
l := s.Discovery.ConfigGenerator.BuildListeners(proxy, s.PushContext())
routeNames := xdstest.ExtractRoutesFromListeners(l)
if len(routeNames) == 0 {
b.Fatal("Got no route names!")
}
b.ResetTimer()
var c model.Resources
for n := 0; n < b.N; n++ {
c, _ = s.Discovery.Generators[v3.RouteType].Generate(proxy, s.PushContext(), &model.WatchedResource{ResourceNames: routeNames}, nil)
if len(c) == 0 {
b.Fatal("Got no routes!")
}
}
logDebug(b, c)
})
}
}
func BenchmarkClusterGeneration(b *testing.B) {
disableLogging()
for _, tt := range testCases {
b.Run(tt.Name, func(b *testing.B) {
s, proxy := setupAndInitializeTest(b, tt)
b.ResetTimer()
var c model.Resources
for n := 0; n < b.N; n++ {
c, _ = s.Discovery.Generators[v3.ClusterType].Generate(proxy, s.PushContext(), nil, nil)
if len(c) == 0 {
b.Fatal("Got no clusters!")
}
}
logDebug(b, c)
})
}
}
func BenchmarkListenerGeneration(b *testing.B) {
disableLogging()
for _, tt := range testCases {
b.Run(tt.Name, func(b *testing.B) {
s, proxy := setupAndInitializeTest(b, tt)
b.ResetTimer()
var c model.Resources
for n := 0; n < b.N; n++ {
c, _ = s.Discovery.Generators[v3.ListenerType].Generate(proxy, s.PushContext(), nil, nil)
if len(c) == 0 {
b.Fatal("Got no listeners!")
}
}
logDebug(b, c)
})
}
}
func BenchmarkNameTableGeneration(b *testing.B) {
disableLogging()
for _, tt := range testCases {
b.Run(tt.Name, func(b *testing.B) {
s, proxy := setupAndInitializeTest(b, tt)
b.ResetTimer()
var c model.Resources
for n := 0; n < b.N; n++ {
c, _ = s.Discovery.Generators[v3.NameTableType].Generate(proxy, s.PushContext(), nil, nil)
if len(c) == 0 && tt.ProxyType != model.Router {
b.Fatal("Got no name tables!")
}
}
logDebug(b, c)
})
}
}
func BenchmarkSecretGeneration(b *testing.B) {
disableLogging()
cases := []ConfigInput{
{
Name: "secrets",
Services: 10,
},
{
Name: "secrets",
Services: 1000,
},
}
for _, tt := range cases {
b.Run(fmt.Sprintf("%s-%d", tt.Name, tt.Services), func(b *testing.B) {
tmpl := template.Must(template.New("").Funcs(sprig.TxtFuncMap()).ParseFiles(path.Join("testdata", "benchmarks", tt.Name+".yaml")))
var buf bytes.Buffer
if err := tmpl.ExecuteTemplate(&buf, tt.Name+".yaml", tt); err != nil {
b.Fatalf("failed to execute template: %v", err)
}
s := NewFakeDiscoveryServer(b, FakeOptions{
KubernetesObjectString: buf.String(),
})
kubesecrets.DisableAuthorizationForTest(s.KubeClient().Kube().(*fake.Clientset))
watchedResources := []string{}
for i := 0; i < tt.Services; i++ {
watchedResources = append(watchedResources, fmt.Sprintf("kubernetes://istio-system/sds-credential-%d", i))
}
proxy := s.SetupProxy(&model.Proxy{Type: model.Router, ConfigNamespace: "istio-system", VerifiedIdentity: &spiffe.Identity{}})
gen := s.Discovery.Generators[v3.SecretType]
res := &model.WatchedResource{ResourceNames: watchedResources}
b.ResetTimer()
var c model.Resources
for n := 0; n < b.N; n++ {
c, _ = gen.Generate(proxy, s.PushContext(), res, &model.PushRequest{Full: true})
if len(c) == 0 {
b.Fatal("Got no secrets!")
}
}
logDebug(b, c)
})
}
}
// BenchmarkEndpointGeneration measures the performance of EDS config generation
// TODO Add more variables, such as different services
func BenchmarkEndpointGeneration(b *testing.B) {
disableLogging()
tests := []struct {
endpoints int
services int
}{
{1, 100},
{10, 10},
{100, 10},
{1000, 1},
}
var response *discovery.DiscoveryResponse
for _, tt := range tests {
b.Run(fmt.Sprintf("%d/%d", tt.endpoints, tt.services), func(b *testing.B) {
s := NewFakeDiscoveryServer(b, FakeOptions{
Configs: createEndpoints(tt.endpoints, tt.services),
})
proxy := &model.Proxy{
Type: model.SidecarProxy,
IPAddresses: []string{"10.3.3.3"},
ID: "random",
ConfigNamespace: "default",
Metadata: &model.NodeMetadata{},
}
push := s.Discovery.globalPushContext()
proxy.SetSidecarScope(push)
b.ResetTimer()
for n := 0; n < b.N; n++ {
loadAssignments := make([]*any.Any, 0)
for svc := 0; svc < tt.services; svc++ {
l := s.Discovery.generateEndpoints(NewEndpointBuilder(fmt.Sprintf("outbound|80||foo-%d.com", svc), proxy, push))
loadAssignments = append(loadAssignments, util.MessageToAny(l))
}
response = endpointDiscoveryResponse(loadAssignments, version, push.LedgerVersion)
}
logDebug(b, response.GetResources())
})
}
}
// setupTest builds a mock test environment. Note: the push context is not initialized, so that it
// can be benchmarked separately; most callers should just use setupAndInitializeTest.
func setupTest(t testing.TB, config ConfigInput) (*FakeDiscoveryServer, *model.Proxy) {
proxyType := config.ProxyType
if proxyType == "" {
proxyType = model.SidecarProxy
}
proxy := &model.Proxy{
Type: proxyType,
IPAddresses: []string{"1.1.1.1"},
ID: "v0.default",
DNSDomain: "default.example.org",
Metadata: &model.NodeMetadata{
Namespace: "default",
Labels: map[string]string{
"istio.io/benchmark": "true",
},
IstioVersion: "1.9.0",
},
// TODO: if you update this, make sure telemetry.yaml is also updated
IstioVersion: &model.IstioVersion{Major: 1, Minor: 6},
ConfigNamespace: "default",
}
configs := getConfigsWithCache(t, config)
s := NewFakeDiscoveryServer(t, FakeOptions{
Configs: configs,
// Allow debounce to avoid overwhelming with writes
DebounceTime: time.Millisecond * 10,
})
return s, proxy
}
var configCache = map[ConfigInput][]config.Config{}
func getConfigsWithCache(t testing.TB, input ConfigInput) []config.Config {
// Config setup is slow for large tests. Cache the configs and return them from the cache.
// This helps even when running a single test, as Go runs the full benchmark (including setup) at least twice.
if cached, f := configCache[input]; f {
return cached
}
configName := input.ConfigName
if configName == "" {
configName = input.Name
}
tmpl := template.Must(template.New("").Funcs(sprig.TxtFuncMap()).ParseFiles(path.Join("testdata", "benchmarks", configName+".yaml")))
var buf bytes.Buffer
if err := tmpl.ExecuteTemplate(&buf, configName+".yaml", input); err != nil {
t.Fatalf("failed to execute template: %v", err)
}
configs, badKinds, err := crd.ParseInputs(buf.String())
if err != nil {
t.Fatalf("failed to read config: %v", err)
}
if len(badKinds) != 0 {
t.Fatalf("Got unknown resources: %v", badKinds)
}
// setup default namespace if not defined
for i, c := range configs {
if c.Namespace == "" {
c.Namespace = "default"
}
configs[i] = c
}
configCache[input] = configs
return configs
}
func setupAndInitializeTest(t testing.TB, config ConfigInput) (*FakeDiscoveryServer, *model.Proxy) {
s, proxy := setupTest(t, config)
initPushContext(s.Env(), proxy)
return s, proxy
}
func initPushContext(env *model.Environment, proxy *model.Proxy) {
env.PushContext.InitContext(env, nil, nil)
proxy.SetSidecarScope(env.PushContext)
proxy.SetGatewaysForProxy(env.PushContext)
proxy.SetServiceInstances(env.ServiceDiscovery)
}
var debugGeneration = env.RegisterBoolVar("DEBUG_CONFIG_DUMP", false, "if enabled, print a full config dump of the generated config")
var benchmarkScope = log.RegisterScope("benchmark", "", 0)
// Add additional debug info for a test
func logDebug(b *testing.B, m model.Resources) {
b.Helper()
b.StopTimer()
if debugGeneration.Get() {
for i, r := range m {
s, err := (&jsonpb.Marshaler{Indent: " "}).MarshalToString(r)
if err != nil {
b.Fatal(err)
}
// Cannot use b.Logf, it truncates
benchmarkScope.Infof("Generated: %d %s", i, s)
}
}
bytes := 0
for _, r := range m {
bytes += len(r.Value)
}
b.ReportMetric(float64(bytes)/1000, "kb/msg")
b.ReportMetric(float64(len(m)), "resources/msg")
b.StartTimer()
}
func createEndpoints(numEndpoints int, numServices int) []config.Config {
result := make([]config.Config, 0, numServices)
for s := 0; s < numServices; s++ {
endpoints := make([]*networking.WorkloadEntry, 0, numEndpoints)
for e := 0; e < numEndpoints; e++ {
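// Spread endpoints across the 111.0.0.0/8 range so every endpoint gets a unique address.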
endpoints = append(endpoints, &networking.WorkloadEntry{Address: fmt.Sprintf("111.%d.%d.%d", e/(256*256), (e/256)%256, e%256)})
}
result = append(result, config.Config{
Meta: config.Meta{
GroupVersionKind: collections.IstioNetworkingV1Alpha3Serviceentries.Resource().GroupVersionKind(),
Name: fmt.Sprintf("foo-%d", s),
Namespace: "default",
CreationTimestamp: time.Now(),
},
Spec: &networking.ServiceEntry{
Hosts: []string{fmt.Sprintf("foo-%d.com", s)},
Ports: []*networking.Port{
{Number: 80, Name: "http-port", Protocol: "http"},
},
Endpoints: endpoints,
Resolution: networking.ServiceEntry_STATIC,
},
})
}
return result
}
permissions-response.model.ts
/*
* Copyright 2020 InfAI (CC SES)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
export interface PermissionsResponseModel {
status: string;
}
mad_api.py
import time
from typing import Dict, List, Optional, Tuple
import requests
import utility.args
class MadObj:
def __init__(self, api, obj_id: int):
assert obj_id >= 0
self.id = obj_id
self._data = {}
self._api = api # type:Api
def _update_data(self):
raise NotImplementedError
@property
def raw_data(self) -> dict:
if not self._data:
self._update_data()
return self._data
class Geofence(MadObj):
def __init__(self, api, geofence_id: int):
super().__init__(api, geofence_id)
self._sa = {}
def _update_data(self):
self._data = self._api.get_json(f'/api/geofence/{self.id}')
@property
def name(self) -> str:
return self.raw_data['name']
@property
def fence_type(self) -> str:
return self.raw_data['fence_type']
@property
def sub_areas(self) -> Dict[str, List[Tuple[float, float]]]:
if not self._sa:
self._sa = {}
name = ''
points = []
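# fence_data is a list of lines: '[name]' lines open a new sub area; all other lines are 'lat,lon' coordinate pairs.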
for line in self.raw_data['fence_data']: # type:str
if line[0] == '[' and line[-1] == ']':
# save previous sub area
if points:
self._sa[name] = points
name = line[1:-1]
points = []
else:
p, q = line.split(',')
points.append((float(p), float(q)))
if points:
self._sa[name] = points
return self._sa
class Area(MadObj):
def __init__(self, api, area_id: int, name: Optional[str] = None):
super().__init__(api, area_id)
self._name: Optional[str] = name
self._sp: List[dict] = []
self._gi: Optional[Geofence] = None
def __repr__(self):
return f"{self.name} ({self.id})"
def _update_data(self):
self._data = self._api.get_json(f'/api/area/{self.id}')
@property
def init(self) -> bool:
return self.raw_data['init']
@property
def name(self) -> str:
return self._name or self.raw_data['name']
@property
def mode(self):
return self.raw_data['mode']
@property
def geofence_included(self) -> Optional[Geofence]:
if not self._gi:
id_ = self.raw_data.get('geofence_included', None) # type:Optional[str]
if id_ is None:
return None
self._gi = Geofence(self._api, int(id_[id_.rfind('/') + 1:]))
return self._gi
def recalculate(self, wait: bool = True, wait_initial: float = 5, wait_interval: float = 1):
self._api.post(f'/api/area/{self.id}', call="recalculate")
if wait:
wait_interval = min(wait_initial, wait_interval)
if not self.is_recalculating:
# either recalculation was incredibly quick (and we will waste wait_initial seconds), or it has not started yet
wait_start = time.time()
while time.time() - wait_start < wait_initial:
time.sleep(wait_interval)
if self.is_recalculating:
break
# at this point recalculation should be running
while self.is_recalculating:
time.sleep(wait_interval)
@property
def is_recalculating(self) -> bool:
return self.id in self._api.get_json('/recalc_status')
@property
def spawnpoints(self) -> List[dict]:
if not self._sp:
self._sp = []
for index in range(len(self.geofence_included.sub_areas)):
self._sp.extend(self._api.get_json('/get_spawn_details', area_id=self.id, event_id=1, mode='ALL', index=index))
return self._sp
@property
def routecalc_id(self) -> int:
id_ = self.raw_data['routecalc'] # type:str
return int(id_[id_.rfind('/') + 1:])
@property
def routecalc(self) -> List[Tuple[float, float]]:
data = [line.split(',') for line in self._api.get_json(f'/api/routecalc/{self.routecalc_id}')['routefile']]
return [(float(lat), float(lon)) for lat, lon in data]
@routecalc.setter
def routecalc(self, data: List[Tuple[float, float]]):
data = [','.join(map(str, line)) for line in data]
self._api.patch(f'/api/routecalc/{self.routecalc_id}', routefile=data)
class Api:
def __init__(self):
args = utility.args.parse_args()
self._mad_url: str = args['madmin_url']
self._mad_auth = (args['madmin_user'], args['madmin_password']) if args['madmin_user'] else None
self._areas = {}
try:
if requests.get(self._mad_url + '/settings/areas', auth=self._mad_auth).status_code != 200:
raise ValueError("Error trying to access MAD Api. Please check your config.")
except requests.exceptions.ConnectionError:
raise ValueError("Could not reach MAD. Please check your config, especially madmin_url")
def _update_areas(self):
areas = self.get_json('/api/area')['results']
areas = {int(area_id[area_id.rfind('/') + 1:]): name for area_id, name in areas.items()}
self._areas = {area_id: Area(self, area_id, name) for area_id, name in sorted(areas.items(), key=lambda k: k[0])}
@property
def areas(self) -> Dict[int, Area]:
if not self._areas:
self._update_areas()
return self._areas
def get(self, path: str, **kwargs):
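# Fire-and-forget GET: the response is intentionally discarded (used by apply_settings).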
requests.get(self._mad_url + path, params=kwargs, auth=self._mad_auth)
def get_json(self, path: str, **kwargs):
return requests.get(self._mad_url + path, params=kwargs, auth=self._mad_auth).json()
def post(self, path: str, **kwargs):
requests.post(self._mad_url + path, json=kwargs, headers={'Content-Type': 'application/json-rpc'}, auth=self._mad_auth)
def patch(self, path: str, **kwargs):
requests.patch(self._mad_url + path, json=kwargs, auth=self._mad_auth)
def apply_settings(self):
self.get('/reload')
pg_pool.rs
use criterion::{criterion_group, criterion_main, Bencher, Criterion};
use sqlx::PgPool;
use sqlx::postgres::PgPoolOptions;
use std::time::{Duration, Instant};
fn bench_pgpool_acquire(c: &mut Criterion) {
let mut group = c.benchmark_group("bench_pgpool_acquire");
for &concurrent in [5u32, 10, 50, 100, 500, 1000, 5000 /*, 10_000, 50_000*/].iter() {
for &fair in [false, true].iter() {
let fairness = if fair { "(fair)" } else { "(unfair)" };
group.bench_with_input(
format!("{} concurrent {}", concurrent, fairness),
&(concurrent, fair),
|b, &(concurrent, fair)| do_bench_acquire(b, concurrent, fair),
);
}
}
group.finish();
}
fn do_bench_acquire(b: &mut Bencher, concurrent: u32, fair: bool) {
let pool = sqlx_rt::block_on(
PgPoolOptions::new()
// we don't want timeouts because we want to see how the pool degrades
.connect_timeout(Duration::from_secs(3600))
// force the pool to start full
.min_connections(50)
.max_connections(50)
// we're not benchmarking `ping()`
.test_before_acquire(false)
.__fair(fair)
.connect(
&dotenv::var("DATABASE_URL").expect("DATABASE_URL must be set to run benchmarks"),
),
)
.expect("failed to open PgPool");
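// Spawn background tasks that keep acquiring and briefly holding connections, creating contention on the pool.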
for _ in 0..concurrent {
let pool = pool.clone();
sqlx_rt::enter_runtime(|| {
sqlx_rt::spawn(async move {
while !pool.is_closed() {
let conn = match pool.acquire().await {
Ok(conn) => conn,
Err(sqlx::Error::PoolClosed) => break,
Err(e) => panic!("failed to acquire concurrent connection: {}", e),
};
// pretend we're using the connection
sqlx_rt::sleep(Duration::from_micros(500)).await;
drop(criterion::black_box(conn));
}
})
});
}
b.iter_custom(|iters| {
sqlx_rt::block_on(async {
// take the start time inside the future to make sure we only count once it's running
let start = Instant::now();
for _ in 0..iters {
criterion::black_box(
pool.acquire()
.await
.expect("failed to acquire connection for benchmark"),
);
}
start.elapsed()
})
});
sqlx_rt::block_on(pool.close());
}
criterion_group!(pg_pool, bench_pgpool_acquire);
criterion_main!(pg_pool);
flagger_client.go
/*
Copyright 2020 The Flux authors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Code generated by client-gen. DO NOT EDIT.
package v1beta1
import (
v1beta1 "github.com/fluxcd/flagger/pkg/apis/flagger/v1beta1"
"github.com/fluxcd/flagger/pkg/client/clientset/versioned/scheme"
rest "k8s.io/client-go/rest"
)
type FlaggerV1beta1Interface interface {
RESTClient() rest.Interface
AlertProvidersGetter
CanariesGetter
MetricTemplatesGetter
}
// FlaggerV1beta1Client is used to interact with features provided by the flagger.app group.
type FlaggerV1beta1Client struct {
restClient rest.Interface
}
func (c *FlaggerV1beta1Client) AlertProviders(namespace string) AlertProviderInterface {
return newAlertProviders(c, namespace)
}
func (c *FlaggerV1beta1Client) Canaries(namespace string) CanaryInterface {
return newCanaries(c, namespace)
}
func (c *FlaggerV1beta1Client) MetricTemplates(namespace string) MetricTemplateInterface {
return newMetricTemplates(c, namespace)
}
// NewForConfig creates a new FlaggerV1beta1Client for the given config.
func NewForConfig(c *rest.Config) (*FlaggerV1beta1Client, error) {
config := *c
if err := setConfigDefaults(&config); err != nil {
return nil, err
}
client, err := rest.RESTClientFor(&config)
if err != nil {
return nil, err
}
return &FlaggerV1beta1Client{client}, nil
}
// NewForConfigOrDie creates a new FlaggerV1beta1Client for the given config and
// panics if there is an error in the config.
func NewForConfigOrDie(c *rest.Config) *FlaggerV1beta1Client {
client, err := NewForConfig(c)
if err != nil {
panic(err)
}
return client
}
// New creates a new FlaggerV1beta1Client for the given RESTClient.
func New(c rest.Interface) *FlaggerV1beta1Client {
return &FlaggerV1beta1Client{c}
}
func setConfigDefaults(config *rest.Config) error {
gv := v1beta1.SchemeGroupVersion
config.GroupVersion = &gv
config.APIPath = "/apis"
config.NegotiatedSerializer = scheme.Codecs.WithoutConversion()
if config.UserAgent == "" {
config.UserAgent = rest.DefaultKubernetesUserAgent()
}
return nil
}
// RESTClient returns a RESTClient that is used to communicate
// with API server by this client implementation.
func (c *FlaggerV1beta1Client) RESTClient() rest.Interface {
if c == nil {
return nil
}
return c.restClient
}
make_rpc_call_client_test.go
package rpcutil_test
import (
"errors"
"net"
"net/http"
"net/rpc"
"testing"
"github.com/stretchr/testify/assert"
"github.com/wenerme/uo/pkg/rpcutil"
)
type Args struct {
A, B int
}
type Quotient struct {
Quo, Rem int
}
type Arith int
func (t *Arith) Multiply(args *Args, reply *int) error {
*reply = args.A * args.B
return nil
}
func (t *Arith) Divide(args *Args, quo *Quotient) error {
if args.B == 0 {
return errors.New("divide by zero")
}
quo.Quo = args.A / args.B
quo.Rem = args.A % args.B
return nil
}
type ArithClient struct {
Multiply func(args *Args) (int, error)
Divide func(args *Args) (Quotient, error)
}
func TestMakeCallClient(t *testing.T) {
arith := new(Arith)
assert.NoError(t, rpc.Register(arith))
rpc.HandleHTTP()
l, e := net.Listen("tcp", ":1234")
assert.NoError(t, e)
go func() {
assert.NoError(t, http.Serve(l, nil))
}()
// Client
c := &ArithClient{}
client, err := rpc.DialHTTP("tcp", "127.0.0.1:1234")
assert.NoError(t, err)
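// MakeRPCCallClient fills in the function fields of c so that each call is forwarded through client.Call under the "Arith" service name.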
assert.NoError(t, rpcutil.MakeRPCCallClient(client.Call, "Arith", c))
{
rel, err := c.Multiply(&Args{A: 10, B: 2})
assert.NoError(t, err)
assert.Equal(t, rel, 20)
}
{
rel, err := c.Divide(&Args{A: 10, B: 2})
assert.NoError(t, err)
assert.Equal(t, rel, Quotient{Quo: 5, Rem: 0})
}
}
Smallest-Integer-Divisible-by-K.py
""" Given a positive integer k, you need to find the length of the smallest positive integer n such that n is divisible by k, and n only contains the digit 1.
Return the length of n. If there is no such n, return -1.
Note: n may not fit in a 64-bit signed integer.
Example 1:
Input: k = 1
Output: 1
Explanation: The smallest answer is n = 1, which has length 1.
Example 2:
Input: k = 2
Output: -1
Explanation: There is no such positive integer n divisible by 2.
Example 3:
Input: k = 3
Output: 3
Explanation: The smallest answer is n = 111, which has length 3.
Constraints:
1 <= k <= 10^5
"""
class Solution:
def smallestRepunitDivByK(self, k: int) -> int:
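# A repunit (1, 11, 111, ...) is odd and never ends in 0 or 5, so it can never be divisible by 2 or 5.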
if(k % 2 == 0 or k % 5 == 0):
return -1
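# Work with the remainder mod k only. Among the first k repunits, either some remainder is 0
# or two repunits share a remainder (pigeonhole); since gcd(k, 10) == 1 the latter also forces
# a zero remainder, so k iterations suffice.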
number = 0
for length in range(1, k + 1):
number = (number * 10 + 1) % k
if number == 0:
return length
return -1
# test Solution Code
sl = Solution()
print(sl.smallestRepunitDivByK(k=1))
print(sl.smallestRepunitDivByK(k=2))
print(sl.smallestRepunitDivByK(k=3))
temporalisomorphvf2.py
# -*- coding: utf-8 -*-
"""
*****************************
Time-respecting VF2 Algorithm
*****************************
An extension of the VF2 algorithm for time-respecting graph isomorphism
testing in temporal graphs.
A temporal graph is one in which edges contain a datetime attribute,
denoting when interaction occurred between the incident nodes. A
time-respecting subgraph of a temporal graph is a subgraph such that
all interactions incident to a node occurred within a time threshold,
delta, of each other. A directed time-respecting subgraph has the
added constraint that incoming interactions to a node must precede
outgoing interactions from the same node - this enforces a sense of
directed flow.
Introduction
------------
The TimeRespectingGraphMatcher and TimeRespectingDiGraphMatcher
extend the GraphMatcher and DiGraphMatcher classes, respectively,
to include temporal constraints on matches. This is achieved through
a semantic check, via the semantic_feasibility() function.
As well as including G1 (the graph in which to seek embeddings) and
G2 (the subgraph structure of interest), the name of the temporal
attribute on the edges and the time threshold, delta, must be supplied
as arguments to the matching constructors.
A delta of zero is the strictest temporal constraint on the match -
only embeddings in which all interactions occur at the same time will
be returned. A delta of one day will allow embeddings in which
adjacent interactions occur up to a day apart.
Examples
--------
Examples will be provided when the datetime type has been incorporated.
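In the meantime, a minimal illustrative sketch (not from the original
module; it assumes edges carry a datetime under a 'date' attribute):
>>> import cynetworkx as nx
>>> from datetime import datetime, timedelta
>>> from cynetworkx.algorithms import isomorphism
>>> G1 = nx.Graph()
>>> G1.add_edge(1, 2, date=datetime(2021, 1, 1, 0))
>>> G1.add_edge(2, 3, date=datetime(2021, 1, 1, 12))
>>> G2 = nx.Graph([(10, 20), (20, 30)])
>>> GM = isomorphism.TimeRespectingGraphMatcher(G1, G2, 'date', timedelta(days=1))
>>> GM.subgraph_is_isomorphic()
True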
Temporal Subgraph Isomorphism
-----------------------------
A brief discussion of the somewhat diverse current literature will be
included here.
References
----------
[1] Redmond, U. and Cunningham, P. Temporal subgraph isomorphism. In:
The 2013 IEEE/ACM International Conference on Advances in Social
Networks Analysis and Mining (ASONAM). Niagara Falls, Canada; 2013:
pages 1451 - 1452.
For a discussion of the literature on temporal networks:
[3] P. Holme and J. Saramaki. Temporal networks. Physics Reports,
519(3):97–125, 2012.
Notes
-----
Handles directed and undirected graphs and graphs with parallel edges.
"""
from __future__ import absolute_import
import cynetworkx as nx
from datetime import datetime, timedelta
from .isomorphvf2 import GraphMatcher, DiGraphMatcher
__all__ = ['TimeRespectingGraphMatcher',
'TimeRespectingDiGraphMatcher']
class TimeRespectingGraphMatcher(GraphMatcher):
def __init__(self, G1, G2, temporal_attribute_name, delta):
"""Initialize TimeRespectingGraphMatcher.
G1 and G2 should be nx.Graph or nx.MultiGraph instances.
Examples
--------
To create a TimeRespectingGraphMatcher which checks for
syntactic and semantic feasibility:
>>> from cynetworkx.algorithms import isomorphism
>>> G1 = nx.Graph(nx.path_graph(4, create_using=nx.Graph()))
>>> G2 = nx.Graph(nx.path_graph(4, create_using=nx.Graph()))
>>> GM = isomorphism.TimeRespectingGraphMatcher(G1, G2, 'date', timedelta(days=1))
"""
self.temporal_attribute_name = temporal_attribute_name
self.delta = delta
super(TimeRespectingGraphMatcher, self).__init__(G1, G2)
def one_hop(self, Gx, Gx_node, neighbors):
"""
Edges one hop out from a node in the mapping should be
time-respecting with respect to each other.
"""
dates = []
for n in neighbors:
if type(Gx) == type(nx.Graph()): # Graph G[u][v] returns the data dictionary.
dates.append(Gx[Gx_node][n][self.temporal_attribute_name])
else: # MultiGraph G[u][v] returns a dictionary of key -> data dictionary.
for edge in Gx[Gx_node][n].values(): # Iterates all edges between node pair.
dates.append(edge[self.temporal_attribute_name])
if any(x is None for x in dates):
raise ValueError('Datetime not supplied for at least one edge.')
return not dates or max(dates) - min(dates) <= self.delta
def two_hop(self, Gx, core_x, Gx_node, neighbors):
"""
Paths of length 2 from Gx_node should be time-respecting.
"""
return all(self.one_hop(Gx, v, [n for n in Gx[v] if n in core_x] + [Gx_node]) for v in neighbors)
def semantic_feasibility(self, G1_node, G2_node):
"""Returns True if adding (G1_node, G2_node) is semantically
feasible.
Any subclass which redefines semantic_feasibility() must
maintain the self.tests if needed, to keep the match() method
functional. Implementations should consider multigraphs.
"""
neighbors = [n for n in self.G1[G1_node] if n in self.core_1]
if not self.one_hop(self.G1, G1_node, neighbors): # Fail fast on first node.
return False
if not self.two_hop(self.G1, self.core_1, G1_node, neighbors):
return False
# Otherwise, this node is semantically feasible!
return True
class TimeRespectingDiGraphMatcher(DiGraphMatcher):
def __init__(self, G1, G2, temporal_attribute_name, delta):
"""Initialize TimeRespectingDiGraphMatcher.
G1 and G2 should be nx.DiGraph or nx.MultiDiGraph instances.
Examples
--------
To create a TimeRespectingDiGraphMatcher which checks for
syntactic and semantic feasibility:
>>> from cynetworkx.algorithms import isomorphism
>>> G1 = nx.DiGraph(nx.path_graph(4, create_using=nx.DiGraph()))
>>> G2 = nx.DiGraph(nx.path_graph(4, create_using=nx.DiGraph()))
>>> GM = isomorphism.TimeRespectingDiGraphMatcher(G1, G2, 'date', timedelta(days=1))
"""
self.temporal_attribute_name = temporal_attribute_name
self.delta = delta
super(TimeRespectingDiGraphMatcher, self).__init__(G1, G2)
def get_pred_dates(self, Gx, Gx_node, core_x, pred):
"""
Get the dates of edges from predecessors.
"""
pred_dates = []
if type(Gx) == type(nx.DiGraph()): # Graph G[u][v] returns the data dictionary.
for n in pred:
pred_dates.append(Gx[n][Gx_node][self.temporal_attribute_name])
else: # MultiGraph G[u][v] returns a dictionary of key -> data dictionary.
for n in pred:
for edge in Gx[n][Gx_node].values(): # Iterates all edge data between node pair.
pred_dates.append(edge[self.temporal_attribute_name])
return pred_dates
def get_succ_dates(self, Gx, Gx_node, core_x, succ):
"""
Get the dates of edges to successors.
"""
succ_dates = []
if type(Gx) == type(nx.DiGraph()): # Graph G[u][v] returns the data dictionary.
for n in succ:
succ_dates.append(Gx[Gx_node][n][self.temporal_attribute_name])
else: # MultiGraph G[u][v] returns a dictionary of key -> data dictionary.
for n in succ:
for edge in Gx[Gx_node][n].values(): # Iterates all edge data between node pair.
succ_dates.append(edge[self.temporal_attribute_name])
return succ_dates
def one_hop(self, Gx, Gx_node, core_x, pred, succ):
"""
The ego node.
"""
pred_dates = self.get_pred_dates(Gx, Gx_node, core_x, pred)
succ_dates = self.get_succ_dates(Gx, Gx_node, core_x, succ)
return self.test_one(pred_dates, succ_dates) and self.test_two(pred_dates, succ_dates)
def two_hop_pred(self, Gx, Gx_node, core_x, pred):
"""
The predecessors of the ego node.
"""
return all(self.one_hop(Gx, p, core_x, self.preds(Gx, core_x, p), self.succs(Gx, core_x, p, Gx_node)) for p in pred)
def two_hop_succ(self, Gx, Gx_node, core_x, succ):
"""
The successors of the ego node.
"""
return all(self.one_hop(Gx, s, core_x, self.preds(Gx, core_x, s, Gx_node), self.succs(Gx, core_x, s)) for s in succ)
def preds(self, Gx, core_x, v, Gx_node=None):
pred = [n for n in Gx.predecessors(v) if n in core_x]
if Gx_node:
pred.append(Gx_node)
return pred
def succs(self, Gx, core_x, v, Gx_node=None):
succ = [n for n in Gx.successors(v) if n in core_x]
if Gx_node:
succ.append(Gx_node)
return succ
def test_one(self, pred_dates, succ_dates):
"""
Edges one hop out from Gx_node in the mapping should be
time-respecting with respect to each other, regardless of
direction.
"""
time_respecting = True
dates = pred_dates + succ_dates
if any(x is None for x in dates):
raise ValueError('Date or datetime not supplied for at least one edge.')
dates.sort() # Small to large.
if 0 < len(dates) and not (dates[-1] - dates[0] <= self.delta):
time_respecting = False
return time_respecting
def test_two(self, pred_dates, succ_dates):
"""
Edges from a dual Gx_node in the mapping should be ordered in
a time-respecting manner.
"""
time_respecting = True
pred_dates.sort()
succ_dates.sort()
# First outgoing edge before last incoming edge: the negation of the condition required to be time-respecting.
if 0 < len(succ_dates) and 0 < len(pred_dates) and succ_dates[0] < pred_dates[-1]:
time_respecting = False
return time_respecting
def semantic_feasibility(self, G1_node, G2_node):
"""Returns True if adding (G1_node, G2_node) is semantically
feasible.
Any subclass which redefines semantic_feasibility() must
maintain the self.tests if needed, to keep the match() method
functional. Implementations should consider multigraphs.
"""
pred, succ = [n for n in self.G1.predecessors(G1_node) if n in self.core_1], [
n for n in self.G1.successors(G1_node) if n in self.core_1]
if not self.one_hop(self.G1, G1_node, self.core_1, pred, succ): # Fail fast on first node.
return False
if not self.two_hop_pred(self.G1, G1_node, self.core_1, pred):
return False
if not self.two_hop_succ(self.G1, G1_node, self.core_1, succ):
return False
# Otherwise, this node is semantically feasible!
return True
service.rs
// Copyright 2019-2021 Parity Technologies (UK) Ltd.
// This file is part of Cumulus.
// Cumulus is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Cumulus is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Cumulus. If not, see <http://www.gnu.org/licenses/>.
use crate::rpc;
use cumulus_client_consensus_aura::{
build_aura_consensus, BuildAuraConsensusParams, SlotProportion,
};
use cumulus_client_consensus_common::{
ParachainCandidate, ParachainConsensus,
};
use cumulus_client_network::build_block_announce_validator;
use cumulus_client_service::{
prepare_node_config, start_collator, start_full_node, StartCollatorParams, StartFullNodeParams,
};
use cumulus_primitives_core::{
relay_chain::v1::{Hash as PHash, PersistedValidationData},
ParaId,
};
use polkadot_service::NativeExecutionDispatch;
pub use parachains_common::{AccountId, Balance, Block, Hash, Header, Index as Nonce};
use futures::lock::Mutex;
use sc_client_api::ExecutorProvider;
use sc_consensus::{
import_queue::{ Verifier as VerifierT},
BlockImportParams,
};
use sc_executor::NativeElseWasmExecutor;
use sc_network::NetworkService;
use sc_service::{Configuration, PartialComponents, Role, TFullBackend, TFullClient, TaskManager};
use sc_telemetry::{Telemetry, TelemetryHandle, TelemetryWorker, TelemetryWorkerHandle};
use sp_api::{ApiExt, ConstructRuntimeApi};
use sp_consensus::{CacheKeyId, SlotData};
use sp_consensus_aura::{sr25519::AuthorityId as AuraId, AuraApi};
use sp_keystore::SyncCryptoStorePtr;
use sp_runtime::{
generic::BlockId,
traits::{BlakeTwo256, Header as HeaderT},
};
use std::sync::Arc;
use substrate_prometheus_endpoint::Registry;
/// Native executor instance.
pub struct DevelopmentRuntimeExecutor;
impl sc_executor::NativeExecutionDispatch for DevelopmentRuntimeExecutor {
type ExtendHostFunctions = frame_benchmarking::benchmarking::HostFunctions;
fn dispatch(method: &str, data: &[u8]) -> Option<Vec<u8>> {
development_runtime::api::dispatch(method, data)
}
fn native_version() -> sc_executor::NativeVersion {
development_runtime::native_version()
}
}
/// Native executor instance.
pub struct ShellRuntimeExecutor;
impl sc_executor::NativeExecutionDispatch for ShellRuntimeExecutor {
type ExtendHostFunctions = ();
fn dispatch(method: &str, data: &[u8]) -> Option<Vec<u8>> {
shell_runtime::api::dispatch(method, data)
}
fn native_version() -> sc_executor::NativeVersion {
shell_runtime::native_version()
}
}
/// Starts a `ServiceBuilder` for a full service.
///
/// Use this if you don't actually need the full service, but just the builder in order to
/// be able to perform chain operations.
pub fn new_partial<RuntimeApi, Executor, BIQ>(
config: &Configuration,
build_import_queue: BIQ,
) -> Result<
PartialComponents<
TFullClient<Block, RuntimeApi, NativeElseWasmExecutor<Executor>>,
TFullBackend<Block>,
(),
sc_consensus::DefaultImportQueue<
Block,
TFullClient<Block, RuntimeApi, NativeElseWasmExecutor<Executor>>,
>,
sc_transaction_pool::FullPool<
Block,
TFullClient<Block, RuntimeApi, NativeElseWasmExecutor<Executor>>,
>,
(Option<Telemetry>, Option<TelemetryWorkerHandle>),
>,
sc_service::Error,
>
where
RuntimeApi: ConstructRuntimeApi<Block, TFullClient<Block, RuntimeApi, NativeElseWasmExecutor<Executor>>>
+ Send
+ Sync
+ 'static,
RuntimeApi::RuntimeApi: sp_transaction_pool::runtime_api::TaggedTransactionQueue<Block>
+ sp_api::Metadata<Block>
+ sp_session::SessionKeys<Block>
+ sp_api::ApiExt<
Block,
StateBackend = sc_client_api::StateBackendFor<TFullBackend<Block>, Block>,
> + sp_offchain::OffchainWorkerApi<Block>
+ sp_block_builder::BlockBuilder<Block>,
sc_client_api::StateBackendFor<TFullBackend<Block>, Block>: sp_api::StateBackend<BlakeTwo256>,
Executor: NativeExecutionDispatch + 'static,
BIQ: FnOnce(
Arc<TFullClient<Block, RuntimeApi, NativeElseWasmExecutor<Executor>>>,
&Configuration,
Option<TelemetryHandle>,
&TaskManager,
) -> Result<
sc_consensus::DefaultImportQueue<
Block,
TFullClient<Block, RuntimeApi, NativeElseWasmExecutor<Executor>>,
>,
sc_service::Error,
>,
{
let telemetry = config
.telemetry_endpoints
.clone()
.filter(|x| !x.is_empty())
.map(|endpoints| -> Result<_, sc_telemetry::Error> {
let worker = TelemetryWorker::new(16)?;
let telemetry = worker.handle().new_telemetry(endpoints);
Ok((worker, telemetry))
})
.transpose()?;
let executor = sc_executor::NativeElseWasmExecutor::<Executor>::new(
config.wasm_method,
config.default_heap_pages,
config.max_runtime_instances,
);
let (client, backend, keystore_container, task_manager) =
sc_service::new_full_parts::<Block, RuntimeApi, _>(
&config,
telemetry.as_ref().map(|(_, telemetry)| telemetry.handle()),
executor,
)?;
let client = Arc::new(client);
let telemetry_worker_handle = telemetry.as_ref().map(|(worker, _)| worker.handle());
let telemetry = telemetry.map(|(worker, telemetry)| {
task_manager.spawn_handle().spawn(
"telemetry",
sc_service::DEFAULT_GROUP_NAME,
worker.run()
);
telemetry
});
let transaction_pool = sc_transaction_pool::BasicPool::new_full(
config.transaction_pool.clone(),
config.role.is_authority().into(),
config.prometheus_registry(),
task_manager.spawn_essential_handle(),
client.clone(),
);
let import_queue = build_import_queue(
client.clone(),
config,
telemetry.as_ref().map(|telemetry| telemetry.handle()),
&task_manager,
)?;
let params = PartialComponents {
backend,
client,
import_queue,
keystore_container,
task_manager,
transaction_pool,
select_chain: (),
other: (telemetry, telemetry_worker_handle),
};
Ok(params)
}
/// Start a shell node with the given parachain `Configuration` and relay chain `Configuration`.
///
/// This is the actual implementation that is abstract over the executor and the runtime api for shell nodes.
#[sc_tracing::logging::prefix_logs_with("Parachain")]
async fn start_shell_node_impl<RuntimeApi, Executor, RB, BIQ, BIC>(
parachain_config: Configuration,
polkadot_config: Configuration,
id: ParaId,
rpc_ext_builder: RB,
build_import_queue: BIQ,
build_consensus: BIC,
) -> sc_service::error::Result<(
TaskManager,
Arc<TFullClient<Block, RuntimeApi, NativeElseWasmExecutor<Executor>>>,
)>
where
RuntimeApi: ConstructRuntimeApi<Block, TFullClient<Block, RuntimeApi, NativeElseWasmExecutor<Executor>>>
+ Send
+ Sync
+ 'static,
RuntimeApi::RuntimeApi: sp_transaction_pool::runtime_api::TaggedTransactionQueue<Block>
+ sp_api::Metadata<Block>
+ sp_session::SessionKeys<Block>
+ sp_api::ApiExt<
Block,
StateBackend = sc_client_api::StateBackendFor<TFullBackend<Block>, Block>,
> + sp_offchain::OffchainWorkerApi<Block>
+ sp_block_builder::BlockBuilder<Block>
+ cumulus_primitives_core::CollectCollationInfo<Block>,
sc_client_api::StateBackendFor<TFullBackend<Block>, Block>: sp_api::StateBackend<BlakeTwo256>,
Executor: sc_executor::NativeExecutionDispatch + 'static,
RB: Fn(
Arc<TFullClient<Block, RuntimeApi, NativeElseWasmExecutor<Executor>>>,
) -> Result<jsonrpc_core::IoHandler<sc_rpc::Metadata>, sc_service::Error>
+ Send
+ 'static,
BIQ: FnOnce(
Arc<TFullClient<Block, RuntimeApi, NativeElseWasmExecutor<Executor>>>,
&Configuration,
Option<TelemetryHandle>,
&TaskManager,
) -> Result<
sc_consensus::DefaultImportQueue<
Block,
TFullClient<Block, RuntimeApi, NativeElseWasmExecutor<Executor>>,
>,
sc_service::Error,
>,
BIC: FnOnce(
Arc<TFullClient<Block, RuntimeApi, NativeElseWasmExecutor<Executor>>>,
Option<&Registry>,
Option<TelemetryHandle>,
&TaskManager,
&polkadot_service::NewFull<polkadot_service::Client>,
Arc<
sc_transaction_pool::FullPool<
Block,
TFullClient<Block, RuntimeApi, NativeElseWasmExecutor<Executor>>,
>,
>,
Arc<NetworkService<Block, Hash>>,
SyncCryptoStorePtr,
bool,
) -> Result<Box<dyn ParachainConsensus<Block>>, sc_service::Error>,
{
if matches!(parachain_config.role, Role::Light) {
return Err("Light client not supported!".into());
}
let parachain_config = prepare_node_config(parachain_config);
let params = new_partial::<RuntimeApi, Executor, BIQ>(¶chain_config, build_import_queue)?;
let (mut telemetry, telemetry_worker_handle) = params.other;
let relay_chain_full_node =
cumulus_client_service::build_polkadot_full_node(polkadot_config, telemetry_worker_handle)
.map_err(|e| match e {
polkadot_service::Error::Sub(x) => x,
s => format!("{}", s).into(),
})?;
let client = params.client.clone();
let backend = params.backend.clone();
let block_announce_validator = build_block_announce_validator(
relay_chain_full_node.client.clone(),
id,
Box::new(relay_chain_full_node.network.clone()),
relay_chain_full_node.backend.clone(),
);
let force_authoring = parachain_config.force_authoring;
let validator = parachain_config.role.is_authority();
let prometheus_registry = parachain_config.prometheus_registry().cloned();
let transaction_pool = params.transaction_pool.clone();
let mut task_manager = params.task_manager;
let import_queue = cumulus_client_service::SharedImportQueue::new(params.import_queue);
let (network, system_rpc_tx, start_network) =
sc_service::build_network(sc_service::BuildNetworkParams {
config: ¶chain_config,
client: client.clone(),
transaction_pool: transaction_pool.clone(),
spawn_handle: task_manager.spawn_handle(),
import_queue: import_queue.clone(),
block_announce_validator_builder: Some(Box::new(|_| block_announce_validator)),
warp_sync: None,
})?;
if parachain_config.offchain_worker.enabled {
sc_service::build_offchain_workers(
¶chain_config,
task_manager.spawn_handle(),
client.clone(),
network.clone(),
);
}
let rpc_client = client.clone();
let rpc_extensions_builder = Box::new(move |_, _| rpc_ext_builder(rpc_client.clone()));
sc_service::spawn_tasks(sc_service::SpawnTasksParams {
rpc_extensions_builder,
client: client.clone(),
transaction_pool: transaction_pool.clone(),
task_manager: &mut task_manager,
config: parachain_config,
keystore: params.keystore_container.sync_keystore(),
backend: backend.clone(),
network: network.clone(),
system_rpc_tx,
telemetry: telemetry.as_mut(),
})?;
let announce_block = {
let network = network.clone();
Arc::new(move |hash, data| network.announce_block(hash, data))
};
if validator {
let parachain_consensus = build_consensus(
client.clone(),
prometheus_registry.as_ref(),
telemetry.as_ref().map(|t| t.handle()),
&task_manager,
&relay_chain_full_node,
transaction_pool,
network,
params.keystore_container.sync_keystore(),
force_authoring,
)?;
let spawner = task_manager.spawn_handle();
let params = StartCollatorParams {
para_id: id,
block_status: client.clone(),
announce_block,
client: client.clone(),
task_manager: &mut task_manager,
relay_chain_full_node,
spawner,
parachain_consensus,
import_queue,
};
start_collator(params).await?;
} else {
let params = StartFullNodeParams {
client: client.clone(),
announce_block,
task_manager: &mut task_manager,
para_id: id,
relay_chain_full_node,
};
start_full_node(params)?;
}
start_network.start_network();
Ok((task_manager, client))
}
/// Start a node with the given parachain `Configuration` and relay chain `Configuration`.
///
/// This is the actual implementation that is abstract over the executor and the runtime api.
#[sc_tracing::logging::prefix_logs_with("Parachain")]
async fn start_node_impl<RuntimeApi, Executor, RB, BIQ, BIC>(
parachain_config: Configuration,
polkadot_config: Configuration,
id: ParaId,
_rpc_ext_builder: RB,
build_import_queue: BIQ,
build_consensus: BIC,
) -> sc_service::error::Result<(
TaskManager,
Arc<TFullClient<Block, RuntimeApi, NativeElseWasmExecutor<Executor>>>,
)>
where
RuntimeApi: ConstructRuntimeApi<Block, TFullClient<Block, RuntimeApi, NativeElseWasmExecutor<Executor>>>
+ Send
+ Sync
+ 'static,
RuntimeApi::RuntimeApi: sp_transaction_pool::runtime_api::TaggedTransactionQueue<Block>
+ sp_api::Metadata<Block>
+ sp_session::SessionKeys<Block>
+ sp_api::ApiExt<
Block,
StateBackend = sc_client_api::StateBackendFor<TFullBackend<Block>, Block>,
> + sp_offchain::OffchainWorkerApi<Block>
+ sp_block_builder::BlockBuilder<Block>
+ cumulus_primitives_core::CollectCollationInfo<Block>
+ pallet_transaction_payment_rpc::TransactionPaymentRuntimeApi<Block, Balance>
+ frame_rpc_system::AccountNonceApi<Block, AccountId, Nonce>,
sc_client_api::StateBackendFor<TFullBackend<Block>, Block>: sp_api::StateBackend<BlakeTwo256>,
Executor: sc_executor::NativeExecutionDispatch + 'static,
RB: Fn(
Arc<TFullClient<Block, RuntimeApi, Executor>>,
) -> Result<jsonrpc_core::IoHandler<sc_rpc::Metadata>, sc_service::Error>
+ Send
+ 'static,
BIQ: FnOnce(
Arc<TFullClient<Block, RuntimeApi, NativeElseWasmExecutor<Executor>>>,
&Configuration,
Option<TelemetryHandle>,
&TaskManager,
) -> Result<
sc_consensus::DefaultImportQueue<
Block,
TFullClient<Block, RuntimeApi, NativeElseWasmExecutor<Executor>>,
>,
sc_service::Error,
> + 'static,
BIC: FnOnce(
Arc<TFullClient<Block, RuntimeApi, NativeElseWasmExecutor<Executor>>>,
Option<&Registry>,
Option<TelemetryHandle>,
&TaskManager,
&polkadot_service::NewFull<polkadot_service::Client>,
Arc<
sc_transaction_pool::FullPool<
Block,
TFullClient<Block, RuntimeApi, NativeElseWasmExecutor<Executor>>,
>,
>,
Arc<NetworkService<Block, Hash>>,
SyncCryptoStorePtr,
bool,
) -> Result<Box<dyn ParachainConsensus<Block>>, sc_service::Error>,
{
if matches!(parachain_config.role, Role::Light) {
return Err("Light client not supported!".into());
}
let parachain_config = prepare_node_config(parachain_config);
let params = new_partial::<RuntimeApi, Executor, BIQ>(¶chain_config, build_import_queue)?;
let (mut telemetry, telemetry_worker_handle) = params.other;
let relay_chain_full_node =
cumulus_client_service::build_polkadot_full_node(polkadot_config, telemetry_worker_handle)
.map_err(|e| match e {
polkadot_service::Error::Sub(x) => x,
s => format!("{}", s).into(),
})?;
let client = params.client.clone();
let backend = params.backend.clone();
let block_announce_validator = build_block_announce_validator(
relay_chain_full_node.client.clone(),
id,
Box::new(relay_chain_full_node.network.clone()),
relay_chain_full_node.backend.clone(),
);
let force_authoring = parachain_config.force_authoring;
let validator = parachain_config.role.is_authority();
let prometheus_registry = parachain_config.prometheus_registry().cloned();
let transaction_pool = params.transaction_pool.clone();
let mut task_manager = params.task_manager;
let import_queue = cumulus_client_service::SharedImportQueue::new(params.import_queue);
let (network, system_rpc_tx, start_network) =
sc_service::build_network(sc_service::BuildNetworkParams {
config: ¶chain_config,
client: client.clone(),
transaction_pool: transaction_pool.clone(),
spawn_handle: task_manager.spawn_handle(),
import_queue: import_queue.clone(),
block_announce_validator_builder: Some(Box::new(|_| block_announce_validator)),
warp_sync: None,
})?;
if parachain_config.offchain_worker.enabled {
sc_service::build_offchain_workers(
¶chain_config,
task_manager.spawn_handle(),
client.clone(),
network.clone(),
);
}
let rpc_extensions_builder = {
let client = client.clone();
let transaction_pool = transaction_pool.clone();
Box::new(move |deny_unsafe, _| {
let deps = rpc::FullDeps {
client: client.clone(),
pool: transaction_pool.clone(),
deny_unsafe,
};
Ok(rpc::create_full(deps))
})
};
sc_service::spawn_tasks(sc_service::SpawnTasksParams {
rpc_extensions_builder,
client: client.clone(),
transaction_pool: transaction_pool.clone(),
task_manager: &mut task_manager,
config: parachain_config,
keystore: params.keystore_container.sync_keystore(),
backend: backend.clone(),
network: network.clone(),
system_rpc_tx,
telemetry: telemetry.as_mut(),
})?;
let announce_block = {
let network = network.clone();
Arc::new(move |hash, data| network.announce_block(hash, data))
};
if validator {
let parachain_consensus = build_consensus(
client.clone(),
prometheus_registry.as_ref(),
telemetry.as_ref().map(|t| t.handle()),
&task_manager,
&relay_chain_full_node,
transaction_pool,
network,
params.keystore_container.sync_keystore(),
force_authoring,
)?;
let spawner = task_manager.spawn_handle();
let params = StartCollatorParams {
para_id: id,
block_status: client.clone(),
announce_block,
client: client.clone(),
task_manager: &mut task_manager,
relay_chain_full_node,
spawner,
parachain_consensus,
import_queue,
};
start_collator(params).await?;
} else {
let params = StartFullNodeParams {
client: client.clone(),
announce_block,
task_manager: &mut task_manager,
para_id: id,
relay_chain_full_node,
};
start_full_node(params)?;
}
start_network.start_network();
Ok((task_manager, client))
}
/// Build the import queue for the development parachain runtime.
pub fn build_development_import_queue(
client: Arc<
TFullClient<
Block,
development_runtime::RuntimeApi,
NativeElseWasmExecutor<DevelopmentRuntimeExecutor>,
>,
>,
config: &Configuration,
telemetry: Option<TelemetryHandle>,
task_manager: &TaskManager,
) -> Result<
sc_consensus::DefaultImportQueue<
Block,
TFullClient<
Block,
development_runtime::RuntimeApi,
NativeElseWasmExecutor<DevelopmentRuntimeExecutor>,
>,
>,
sc_service::Error,
> {
let slot_duration = cumulus_client_consensus_aura::slot_duration(&*client)?;
cumulus_client_consensus_aura::import_queue::<
sp_consensus_aura::sr25519::AuthorityPair,
_,
_,
_,
_,
_,
_,
>(cumulus_client_consensus_aura::ImportQueueParams {
block_import: client.clone(),
client: client.clone(),
create_inherent_data_providers: move |_, _| async move {
let time = sp_timestamp::InherentDataProvider::from_system_time();
let slot =
sp_consensus_aura::inherents::InherentDataProvider::from_timestamp_and_duration(
*time,
slot_duration.slot_duration(),
);
Ok((time, slot))
},
registry: config.prometheus_registry().clone(),
can_author_with: sp_consensus::CanAuthorWithNativeVersion::new(client.executor().clone()),
spawner: &task_manager.spawn_essential_handle(),
telemetry,
})
.map_err(Into::into)
}
/// Start a development parachain node.
pub async fn start_development_node(
parachain_config: Configuration,
polkadot_config: Configuration,
id: ParaId,
) -> sc_service::error::Result<(
TaskManager,
Arc<
TFullClient<
Block,
development_runtime::RuntimeApi,
NativeElseWasmExecutor<DevelopmentRuntimeExecutor>,
>,
>,
)> {
start_node_impl::<development_runtime::RuntimeApi, DevelopmentRuntimeExecutor, _, _, _>(
parachain_config,
polkadot_config,
id,
|_| Ok(Default::default()),
build_development_import_queue,
|client,
prometheus_registry,
telemetry,
task_manager,
relay_chain_node,
transaction_pool,
sync_oracle,
keystore,
force_authoring| {
let slot_duration = cumulus_client_consensus_aura::slot_duration(&*client)?;
let proposer_factory = sc_basic_authorship::ProposerFactory::with_proof_recording(
task_manager.spawn_handle(),
client.clone(),
transaction_pool,
prometheus_registry.clone(),
telemetry.clone(),
);
let relay_chain_backend = relay_chain_node.backend.clone();
let relay_chain_client = relay_chain_node.client.clone();
Ok(build_aura_consensus::<
sp_consensus_aura::sr25519::AuthorityPair,
_,
_,
_,
_,
_,
_,
_,
_,
_,
>(BuildAuraConsensusParams {
proposer_factory,
create_inherent_data_providers: move |_, (relay_parent, validation_data)| {
let parachain_inherent =
cumulus_primitives_parachain_inherent::ParachainInherentData::create_at_with_client(
relay_parent,
&relay_chain_client,
&*relay_chain_backend,
&validation_data,
id,
);
async move {
let time = sp_timestamp::InherentDataProvider::from_system_time();
let slot =
sp_consensus_aura::inherents::InherentDataProvider::from_timestamp_and_duration(
*time,
slot_duration.slot_duration(),
);
let parachain_inherent = parachain_inherent.ok_or_else(|| {
Box::<dyn std::error::Error + Send + Sync>::from(
"Failed to create parachain inherent",
)
})?;
Ok((time, slot, parachain_inherent))
}
},
block_import: client.clone(),
relay_chain_client: relay_chain_node.client.clone(),
relay_chain_backend: relay_chain_node.backend.clone(),
para_client: client.clone(),
backoff_authoring_blocks: Option::<()>::None,
sync_oracle,
keystore,
force_authoring,
slot_duration,
// We got around 500ms for proposing
block_proposal_slot_portion: SlotProportion::new(1f32 / 24f32),
// And a maximum of 750ms if slots are skipped
max_block_proposal_slot_portion: Some(SlotProportion::new(1f32 / 16f32)),
telemetry,
}))
},
)
.await
}
/// Build the import queue for the shell runtime.
pub fn build_shell_import_queue(
client: Arc<
TFullClient<Block, shell_runtime::RuntimeApi, NativeElseWasmExecutor<ShellRuntimeExecutor>>,
>,
config: &Configuration,
_: Option<TelemetryHandle>,
task_manager: &TaskManager,
) -> Result<
sc_consensus::DefaultImportQueue<
Block,
TFullClient<Block, shell_runtime::RuntimeApi, NativeElseWasmExecutor<ShellRuntimeExecutor>>,
>,
sc_service::Error,
> {
cumulus_client_consensus_relay_chain::import_queue(
client.clone(),
client,
|_, _| async { Ok(()) },
&task_manager.spawn_essential_handle(),
config.prometheus_registry().clone(),
)
.map_err(Into::into)
}
/// Start a polkadot-shell parachain node.
pub async fn start_shell_node(
parachain_config: Configuration,
polkadot_config: Configuration,
id: ParaId,
) -> sc_service::error::Result<(
TaskManager,
Arc<
TFullClient<Block, shell_runtime::RuntimeApi, NativeElseWasmExecutor<ShellRuntimeExecutor>>,
>,
)> {
start_shell_node_impl::<
shell_runtime::RuntimeApi,
ShellRuntimeExecutor,
_,
_,
_,
>(
parachain_config,
polkadot_config,
id,
|_| Ok(Default::default()),
build_shell_import_queue,
|client,
prometheus_registry,
telemetry,
task_manager,
relay_chain_node,
transaction_pool,
_,
_,
_| {
let proposer_factory = sc_basic_authorship::ProposerFactory::with_proof_recording(
task_manager.spawn_handle(),
client.clone(),
transaction_pool,
prometheus_registry.clone(),
telemetry.clone(),
);
let relay_chain_backend = relay_chain_node.backend.clone();
let relay_chain_client = relay_chain_node.client.clone();
Ok(
cumulus_client_consensus_relay_chain::build_relay_chain_consensus(
cumulus_client_consensus_relay_chain::BuildRelayChainConsensusParams {
para_id: id,
proposer_factory,
block_import: client.clone(),
relay_chain_client: relay_chain_node.client.clone(),
relay_chain_backend: relay_chain_node.backend.clone(),
create_inherent_data_providers:
move |_, (relay_parent, validation_data)| {
let parachain_inherent =
cumulus_primitives_parachain_inherent::ParachainInherentData::create_at_with_client(
relay_parent,
&relay_chain_client,
&*relay_chain_backend,
&validation_data,
id,
);
async move {
let parachain_inherent =
parachain_inherent.ok_or_else(|| {
Box::<dyn std::error::Error + Send + Sync>::from(
"Failed to create parachain inherent",
)
})?;
Ok(parachain_inherent)
}
},
},
),
)
},
)
.await
}
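/// A lazily-built value: the wrapped closure runs on first access and the result is cached for later calls.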
enum BuildOnAccess<R> {
Uninitialized(Option<Box<dyn FnOnce() -> R + Send + Sync>>),
Initialized(R),
}
impl<R> BuildOnAccess<R> {
fn get_mut(&mut self) -> &mut R {
loop {
match self {
Self::Uninitialized(f) => {
*self = Self::Initialized((f.take().unwrap())());
}
Self::Initialized(ref mut r) => return r,
}
}
}
}
/// Special [`ParachainConsensus`] implementation that waits for the upgrade from
/// shell to a parachain runtime that implements Aura.
struct WaitForAuraConsensus<Client> {
client: Arc<Client>,
aura_consensus: Arc<Mutex<BuildOnAccess<Box<dyn ParachainConsensus<Block>>>>>,
relay_chain_consensus: Arc<Mutex<Box<dyn ParachainConsensus<Block>>>>,
}
impl<Client> Clone for WaitForAuraConsensus<Client> {
fn clone(&self) -> Self {
Self {
client: self.client.clone(),
aura_consensus: self.aura_consensus.clone(),
relay_chain_consensus: self.relay_chain_consensus.clone(),
}
}
}
#[async_trait::async_trait]
impl<Client> ParachainConsensus<Block> for WaitForAuraConsensus<Client>
where
Client: sp_api::ProvideRuntimeApi<Block> + Send + Sync,
Client::Api: AuraApi<Block, AuraId>,
{
async fn produce_candidate(
&mut self,
parent: &Header,
relay_parent: PHash,
validation_data: &PersistedValidationData,
) -> Option<ParachainCandidate<Block>> {
let block_id = BlockId::hash(parent.hash());
if self
.client
.runtime_api()
.has_api::<dyn AuraApi<Block, AuraId>>(&block_id)
.unwrap_or(false)
{
self.aura_consensus
.lock()
.await
.get_mut()
.produce_candidate(parent, relay_parent, validation_data)
.await
} else {
self.relay_chain_consensus
.lock()
.await
.produce_candidate(parent, relay_parent, validation_data)
.await
}
}
}
struct Verifier<Client> {
client: Arc<Client>,
aura_verifier: BuildOnAccess<Box<dyn VerifierT<Block>>>,
relay_chain_verifier: Box<dyn VerifierT<Block>>,
}
#[async_trait::async_trait]
impl<Client> VerifierT<Block> for Verifier<Client>
where
Client: sp_api::ProvideRuntimeApi<Block> + Send + Sync,
Client::Api: AuraApi<Block, AuraId>,
{
async fn verify(
&mut self,
block_import: BlockImportParams<Block, ()>,
) -> Result<
(
BlockImportParams<Block, ()>,
Option<Vec<(CacheKeyId, Vec<u8>)>>,
),
String,
> {
let block_id = BlockId::hash(*block_import.header.parent_hash());
if self
.client
.runtime_api()
.has_api::<dyn AuraApi<Block, AuraId>>(&block_id)
.unwrap_or(false)
{
self.aura_verifier.get_mut().verify(block_import).await
} else {
self.relay_chain_verifier.verify(block_import).await
}
}
} | build_consensus: BIC, |
footer.tsx | import * as React from 'react';
import { Link, useStaticQuery, graphql } from 'gatsby';
import SocialProfile from '../../components/social-profile/social-profile';
import FooterWrapper, {
FooterCol,
Logo,
Infos,
FooterTitle,
FooterContent,
Menu,
} from './footer.style';
import LogoImage from '../../images/logo.svg';
import {
IoLogoFacebook,
IoLogoTwitter,
IoLogoInstagram,
IoLogoLinkedin,
} from 'react-icons/io';
const MenuItems = [
{
label: 'About',
url: '/about',
},
{
label: 'Contact',
url: '/contact',
},
{
label: '404 Page',
url: '/404',
},
];
const SocialLinks = [
{
icon: <IoLogoFacebook />,
url: 'https://www.facebook.com/redqinc/',
},
{
icon: <IoLogoInstagram />,
url: 'https://www.instagram.com/redqinc/',
},
{
icon: <IoLogoTwitter />,
url: 'https://twitter.com/redqinc',
},
{
icon: <IoLogoLinkedin />,
url: 'https://www.linkedin.com/company/redqinc/',
},
];
type FooterProps = {
children: React.ReactNode;
};
const Footer: React.FunctionComponent<FooterProps> = ({
children,
...props
}) => {
const Data = useStaticQuery(graphql`
query {
allMarkdownRemark {
group(field: frontmatter___categories) {
fieldValue
}
}
}
`);
const Category = Data.allMarkdownRemark.group;
return (
<FooterWrapper {...props}>
<FooterCol>
<Logo>
<Link to="/">
<img src={LogoImage} alt="logo" />
</Link>
</Logo>
<Infos>23 King Street, 5th Avenue, New York</Infos>
<Infos>+1-2345-6789-9</Infos>
<br />
<Infos>
Copyright ©
<a href="https://redq.io/"> RedQ, Inc.</a>
</Infos>
</FooterCol>
<FooterCol>
<FooterTitle>Quick Links</FooterTitle>
<FooterContent>
{MenuItems.map((item, index) => (
<Menu key={index} to={item.url}>
{item.label}
</Menu>
))}
</FooterContent>
</FooterCol>
<FooterCol>
<FooterTitle>Category</FooterTitle>
<FooterContent>
        {Category.slice(0, 4).map((cat: any, index: number) => (
<Menu key={index} to={`/category/${cat.fieldValue}`}>
{cat.fieldValue}
</Menu>
))}
</FooterContent>
</FooterCol>
<FooterCol>
| </FooterWrapper>
);
};
export default Footer; | <FooterTitle>Follow Us</FooterTitle>
<SocialProfile items={SocialLinks} />
</FooterCol>
|
item.rs | use serde::{Serialize, Deserialize};
use crate::SteamID;
use crate::response::attributes::{Attributes, Value as AttributeValue};
use crate::response::deserializers::{
deserialize_attributes,
from_optional_number_or_string
};
use tf2_enum::{
Wear, Quality, KillstreakTier, Paint, StrangePart, Killstreaker, Sheen, Origin,
Spell, FootprintsSpell, PaintSpell, Attribute, Attributes as EnumAttributes
};
#[derive(Serialize, Deserialize, PartialEq, Clone, Debug)]
pub struct Item {
pub defindex: u32,
pub quality: Quality,
#[serde(default)]
pub flag_cannot_craft: bool,
#[serde(default)]
#[serde(deserialize_with = "from_optional_number_or_string")]
pub id: Option<u64>,
#[serde(default)]
#[serde(deserialize_with = "from_optional_number_or_string")]
pub original_id: Option<u64>,
#[serde(default)]
#[serde(deserialize_with = "from_optional_number_or_string")]
pub level: Option<u8>,
#[serde(default)]
#[serde(deserialize_with = "from_optional_number_or_string")]
pub inventory: Option<u32>,
#[serde(default)]
#[serde(deserialize_with = "from_optional_number_or_string")]
pub quantity: Option<u32>,
#[serde(default)]
pub origin: Option<Origin>,
#[serde(default)]
#[serde(deserialize_with = "deserialize_attributes")]
pub attributes: Attributes,
pub marketplace_price: Option<f32>,
pub marketplace_bot_steamid: Option<SteamID>,
pub marketplace_sku: Option<String>,
pub marketplace_image: Option<String>,
}
fn convert_float_u32(float: f64) -> Option<u32> {
let int = float as u32;
if int as f64 == float {
Some(int)
} else {
None
}
}
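// Quick sanity sketch (assumed test, not from the original source): only
// floats that are exact non-negative integers survive the round trip.
#[cfg(test)]
mod convert_float_u32_sketch {
    use super::convert_float_u32;

    #[test]
    fn exact_integers_convert_fractions_do_not() {
        assert_eq!(convert_float_u32(3.0), Some(3));
        assert_eq!(convert_float_u32(3.5), None);
        // -1.0 saturates to 0 when cast, and 0.0 != -1.0, so it is rejected.
        assert_eq!(convert_float_u32(-1.0), None);
    }
}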
impl Item {
pub fn get_quality(&self) -> Quality {
self.quality.clone()
}
// todo - I may change these to return explicit errors later on
// (attribute does not exist, failed to parse attribute)
// in addition, most of these methods can be written in a more generic way
pub fn get_particle_value(&self) -> Option<u32> {
if let Some(attribute) = self.attributes.get(&134) {
if let Some(float_value) = attribute.float_value {
return convert_float_u32(float_value);
}
}
None
}
pub fn get_skin_value(&self) -> Option<u32> {
if let Some(attribute) = self.attributes.get(&834) {
if let Some(AttributeValue::Number(value)) = attribute.value {
if let Ok(value) = u32::try_from(value) {
return Some(value);
}
}
}
None
}
pub fn get_killstreak_tier(&self) -> Option<KillstreakTier> {
if let Some(attribute) = self.attributes.get(&(KillstreakTier::DEFINDEX as i32)) {
if let Some(float_value) = attribute.float_value {
if let Some(float_value) = convert_float_u32(float_value) {
if let Ok(killstreak_tier) = KillstreakTier::try_from(float_value) {
return Some(killstreak_tier);
}
}
}
}
None
}
pub fn get_wear(&self) -> Option<Wear> {
if let Some(attribute) = self.attributes.get(&(Wear::DEFINDEX as i32)) {
if let Some(float_value) = attribute.float_value {
if let Ok(wear) = Wear::try_from(float_value) {
return Some(wear);
}
}
}
None
}
pub fn get_spells(&self) -> Option<Vec<Spell>> {
let spells = Spell::DEFINDEX
.iter()
.filter_map(|defindex| {
if let Some(attribute) = self.attributes.get(&(*defindex as i32)) {
match *defindex { | return Some(Spell::Footprints(spell));
}
}
}
None
},
Spell::DEFINDEX_PAINT => {
if let Some(float_value) = attribute.float_value {
if let Some(float_value) = convert_float_u32(float_value) {
if let Ok(spell) = PaintSpell::try_from(float_value) {
return Some(Spell::Paint(spell));
}
}
}
None
},
Spell::DEFINDEX_VOICES_FROM_BELOW => Some(Spell::VoicesFromBelow),
Spell::DEFINDEX_PUMPKIN_BOMBS => Some(Spell::PumpkinBombs),
Spell::DEFINDEX_HALLOWEEN_FIRE => Some(Spell::HalloweenFire),
Spell::DEFINDEX_EXORCISM => Some(Spell::Exorcism),
_ => None,
}
} else {
None
}
})
.collect::<Vec<Spell>>();
if !spells.is_empty() {
Some(spells)
} else {
None
}
}
pub fn get_strange_parts(&self) -> Option<Vec<StrangePart>> {
let strange_parts = StrangePart::DEFINDEX
.iter()
.filter_map(|defindex| {
self.attributes.get(&(*defindex as i32))
.and_then(|attribute| attribute.float_value)
.and_then(|float_value| {
if let Some(float_value) = convert_float_u32(float_value) {
if let Ok(strange_part) = StrangePart::try_from(float_value) {
return Some(strange_part);
}
}
None
})
})
.collect::<Vec<StrangePart>>();
if !strange_parts.is_empty() {
Some(strange_parts)
} else {
None
}
}
pub fn get_paint(&self) -> Option<Paint> {
if self.defindex < 5027 || self.defindex > 5077 {
if let Some(attribute) = self.attributes.get(&(Paint::DEFINDEX as i32)) {
if let Some(float_value) = attribute.float_value {
if let Some(float_value) = convert_float_u32(float_value) {
if let Ok(paint) = Paint::try_from(float_value) {
return Some(paint);
}
}
}
}
}
None
}
pub fn get_killstreaker(&self) -> Option<Killstreaker> {
if let Some(attribute) = self.attributes.get(&(Killstreaker::DEFINDEX as i32)) {
if let Some(float_value) = attribute.float_value {
if let Some(float_value) = convert_float_u32(float_value) {
if let Ok(killstreaker) = Killstreaker::try_from(float_value) {
return Some(killstreaker);
}
}
}
}
None
}
pub fn get_sheen(&self) -> Option<Sheen> {
if let Some(attribute) = self.attributes.get(&(Sheen::DEFINDEX as i32)) {
if let Some(float_value) = attribute.float_value {
if let Some(float_value) = convert_float_u32(float_value) {
if let Ok(sheen) = Sheen::try_from(float_value) {
return Some(sheen);
}
}
}
}
None
}
pub fn is_craftable(&self) -> bool {
!self.flag_cannot_craft
}
pub fn is_australium(&self) -> bool {
self.attributes.contains_key(&2027)
}
pub fn is_festivized(&self) -> bool {
self.attributes.contains_key(&2053)
}
pub fn is_strange(&self) -> bool {
// strange quality items are not "strangified" items
if self.quality == Quality::Strange {
false
} else {
self.attributes.contains_key(&214)
}
}
} | Spell::DEFINDEX_FOOTPRINTS => {
if let Some(float_value) = attribute.float_value {
if let Some(float_value) = convert_float_u32(float_value) {
if let Ok(spell) = FootprintsSpell::try_from(float_value) { |
fallback_route_properties_py3.py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class FallbackRouteProperties(Model):
"""The properties of the fallback route. IoT Hub uses these properties when it
routes messages to the fallback endpoint.
Variables are only populated by the server, and will be ignored when
sending a request.
All required parameters must be populated in order to send to Azure.
:param name: The name of the route. The name can only include alphanumeric
characters, periods, underscores, and hyphens; it has a maximum length of
64 characters and must be unique.
:type name: str
:ivar source: Required. The source to which the routing rule is to be
applied. For example, DeviceMessages. Default value: "DeviceMessages".
:vartype source: str
:param condition: The condition which is evaluated in order to apply the
fallback route. If the condition is not provided it will evaluate to true
by default. For grammar, See:
https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-query-language
:type condition: str
:param endpoint_names: Required. The list of endpoints to which messages
that satisfy the condition are routed. Currently only one endpoint is
allowed.
:type endpoint_names: list[str]
:param is_enabled: Required. Used to specify whether the fallback route is
enabled.
:type is_enabled: bool
"""
_validation = {
'source': {'required': True, 'constant': True},
'endpoint_names': {'required': True, 'max_items': 1, 'min_items': 1},
'is_enabled': {'required': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'source': {'key': 'source', 'type': 'str'},
'condition': {'key': 'condition', 'type': 'str'},
'endpoint_names': {'key': 'endpointNames', 'type': '[str]'},
'is_enabled': {'key': 'isEnabled', 'type': 'bool'},
}
source = "DeviceMessages"
def | (self, *, endpoint_names, is_enabled: bool, name: str=None, condition: str=None, **kwargs) -> None:
super(FallbackRouteProperties, self).__init__(**kwargs)
self.name = name
self.condition = condition
self.endpoint_names = endpoint_names
self.is_enabled = is_enabled
| __init__ |
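# Hypothetical usage sketch (not AutoRest-generated code): `endpoint_names`
# and `is_enabled` are required keyword arguments; `source` is fixed by the
# model as a class-level constant.
#
#     route = FallbackRouteProperties(
#         endpoint_names=["events"],
#         is_enabled=True,
#         condition="true",
#     )
#     assert route.source == "DeviceMessages"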
with_rate.rs | use crate::request_rate::{
api::Api, api_rate::ApiRate,
target_rate::TargetRate, RequestRate,
}; // use crate::request_rate
use std::time::Duration;
use stream_throttle::{ThrottleRate, ThrottlePool};
impl RequestRate {
/// Specifies the request rate for the selected API. _Do not use this method
/// to set request rate limits, use `ClientSettings.with_rate()` instead_.
///
/// ## Arguments:
///
/// * `api` ‧ Which Google Maps API are you setting the rate limit for? For
/// example, `Api::Directions`, `Api::DistanceMatrix`, `Api::Elevation`,
/// `Api::Geocoding`, `Api::TimeZone`, and so on. The `Api::All` rate limit
/// is applied to all Google Maps API requests _in addition_ to the per-API
/// rate limits.
///
/// * `requests` ‧ The number of requests the client library is attempting
/// to target. For example, _2 requests_ per 1 hour.
///
/// * `duration` ‧ The duration for the targeted request rate. For example,
/// 1 request _per 1 minute_. This can be defined using the
/// `std::time::Duration` methods.
///
/// ## Examples:
///
    /// * Sets the rate limit for all Google Maps API requests to _2 requests per
/// minute_:
/// ```rust
/// with_rate(Api::All, 2, Duration::from_secs(60)) // 1 minute
/// ```
///
    /// * Sets the rate limit for Google Maps Elevation API requests to _1
    /// request per second_:
    /// ```rust
    /// with_rate(Api::Elevation, 1, Duration::from_secs(1)) // 1 second
/// ```
///
/// * This method can be stacked:
/// ```rust
/// with_rate(Api::All, 1, Duration::from_secs(60)) // 1 minute
/// with_rate(Api::Directions, 1, Duration::from_secs(3_600)) // 1 hour
    /// with_rate(Api::TimeZone, 2, Duration::from_secs(60)) // 1 minute
/// ```
pub fn with_rate(
&mut self,
api: Api,
requests: u16,
duration: Duration
) -> &mut RequestRate {
|
} // impl | // Select `RequestRate` field for the API specified by the caller.
        let api_ref = self.rate_map.get_mut(&api);
let throttle_pool = match requests {
0 => None,
_ => {
let throttle_rate = ThrottleRate::new(requests as usize, duration);
Some(ThrottlePool::new(throttle_rate))
}
};
// Has the ApiRate been set already?
match api_ref {
// If not, initialize the structure:
None => {
self.rate_map.insert(api.clone(), ApiRate {
target_rate: TargetRate { requests, duration },
throttle_pool,
});
}
            // If it has, overwrite it with the new target request rate and a
            // fresh throttle pool:
Some(api_rate) => {
*api_rate = ApiRate {
// Set new target request rate:
target_rate: TargetRate { requests, duration },
throttle_pool,
};
} // ApiRate
} // match
self
} // fn |
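// Hedged usage sketch (assumes a mutable `RequestRate` is already in hand;
// end users would normally go through `ClientSettings.with_rate()` as noted
// above). Per-API limits stack on top of `Api::All`, and calls can chain:
//
//     fn example(rate: &mut RequestRate) {
//         rate.with_rate(Api::All, 2, Duration::from_secs(60))
//             .with_rate(Api::Elevation, 1, Duration::from_secs(1));
//     }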
statestorage.go | package telebot
import "sync"
type stateStorage struct {
m sync.Mutex
data map[int64]string
defaultState string
}
func newStateStorage() *stateStorage {
return &stateStorage{
data: map[int64]string{},
defaultState: "Welcome",
} | }
func (ss *stateStorage) setDefaultState(defaultState string) *stateStorage {
ss.defaultState = defaultState
return ss
}
func (ss *stateStorage) SetUserState(userId int64, state string) error {
ss.m.Lock()
defer ss.m.Unlock()
ss.data[userId] = state
return nil
}
func (ss *stateStorage) UserState(userId int64) (state string, err error) {
	// Lock here as well: reading the map while SetUserState writes to it
	// concurrently would be a data race.
	ss.m.Lock()
	defer ss.m.Unlock()
	var ok bool
	if state, ok = ss.data[userId]; ok {
return
}
state = ss.defaultState
return
} | |
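// Hypothetical usage sketch (not part of the original file): unknown users
// fall back to the default state until one is stored for them.
func exampleStateStorage() {
	ss := newStateStorage().setDefaultState("MainMenu")
	state, _ := ss.UserState(42) // "MainMenu": nothing stored for user 42 yet
	_ = ss.SetUserState(42, "AwaitingInput")
	state, _ = ss.UserState(42) // "AwaitingInput"
	_ = state
}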
client_test.go | package main
import (
"bytes"
log "github.com/Sirupsen/logrus"
"io"
"strings"
)
type TestClient struct {
logger *log.Entry
reader io.Reader
output *bytes.Buffer
closed bool
}
func NewTestClient(data string) *TestClient |
func (tc *TestClient) Log() *log.Entry {
return tc.logger
}
func (tc *TestClient) Read(p []byte) (n int, err error) {
return tc.reader.Read(p)
}
func (tc *TestClient) Write(p []byte) (n int, err error) {
return tc.output.Write(p)
}
func (tc *TestClient) Close() {
tc.closed = true
}
func (tc *TestClient) HandleErrors() {
err := recover()
if err == nil {
return
}
switch err := err.(type) {
case *RequestError:
err.Format(tc)
if err.code >= 500 {
tc.logger.Error(err)
panic(err)
}
default:
panic(err)
}
}
| {
return &TestClient{
reader: strings.NewReader(data),
output: new(bytes.Buffer),
logger: log.WithFields(
log.Fields{
"test": true,
}),
}
} |
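// Hypothetical sketch (not in the original file): reads are served from the
// seeded string and writes accumulate in the in-memory output buffer.
func exampleTestClient() {
	tc := NewTestClient("ping")
	buf := make([]byte, 4)
	n, _ := tc.Read(buf) // n == 4, buf == "ping"
	tc.Write(buf[:n])    // echoed into tc.output
	tc.Close()           // tc.closed == true
}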
checkpoints.go | // Copyright (c) 2013-2016 The btcsuite developers
// Use of this source code is governed by an ISC
// license that can be found in the LICENSE file.
package blockchain
import (
"fmt"
"time"
"github.com/aviator-coding/bchd/chaincfg"
"github.com/aviator-coding/bchd/chaincfg/chainhash"
"github.com/aviator-coding/bchd/txscript"
"github.com/aviator-coding/bchutil"
)
// CheckpointConfirmations is the number of blocks before the end of the current
// best block chain that a good checkpoint candidate must be.
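// For example, with the best chain tip at height 700000, a candidate block
// must sit at height 697984 (700000 - 2016) or lower to qualify.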
const CheckpointConfirmations = 2016
// newHashFromStr converts the passed big-endian hex string into a
// chainhash.Hash. It only differs from the one available in chainhash in that
// it ignores the error since it will only (and must only) be called with
// hard-coded, and therefore known good, hashes.
func newHashFromStr(hexStr string) *chainhash.Hash |
// Checkpoints returns a slice of checkpoints (regardless of whether they are
// already known). When there are no checkpoints for the chain, it will return
// nil.
//
// This function is safe for concurrent access.
func (b *BlockChain) Checkpoints() []chaincfg.Checkpoint {
return b.checkpoints
}
// HasCheckpoints returns whether this BlockChain has checkpoints defined.
//
// This function is safe for concurrent access.
func (b *BlockChain) HasCheckpoints() bool {
return len(b.checkpoints) > 0
}
// LatestCheckpoint returns the most recent checkpoint (regardless of whether it
// is already known). When there are no defined checkpoints for the active chain
// instance, it will return nil.
//
// This function is safe for concurrent access.
func (b *BlockChain) LatestCheckpoint() *chaincfg.Checkpoint {
if !b.HasCheckpoints() {
return nil
}
return &b.checkpoints[len(b.checkpoints)-1]
}
// verifyCheckpoint returns whether the passed block height and hash combination
// match the checkpoint data. It also returns true if there is no checkpoint
// data for the passed block height.
func (b *BlockChain) verifyCheckpoint(height int32, hash *chainhash.Hash) bool {
if !b.HasCheckpoints() {
return true
}
// Nothing to check if there is no checkpoint data for the block height.
checkpoint, exists := b.checkpointsByHeight[height]
if !exists {
return true
}
if !checkpoint.Hash.IsEqual(hash) {
return false
}
log.Infof("Verified checkpoint at height %d/block %s", checkpoint.Height,
checkpoint.Hash)
return true
}
// findPreviousCheckpoint finds the most recent checkpoint that is already
// available in the downloaded portion of the block chain and returns the
// associated block node. It returns nil if a checkpoint can't be found (this
// should really only happen for blocks before the first checkpoint).
//
// This function MUST be called with the chain lock held (for reads).
func (b *BlockChain) findPreviousCheckpoint() (*blockNode, error) {
if !b.HasCheckpoints() {
return nil, nil
}
// Perform the initial search to find and cache the latest known
// checkpoint if the best chain is not known yet or we haven't already
// previously searched.
checkpoints := b.checkpoints
numCheckpoints := len(checkpoints)
if b.checkpointNode == nil && b.nextCheckpoint == nil {
// Loop backwards through the available checkpoints to find one
// that is already available.
for i := numCheckpoints - 1; i >= 0; i-- {
node := b.index.LookupNode(checkpoints[i].Hash)
if node == nil || !b.bestChain.Contains(node) {
continue
}
// Checkpoint found. Cache it for future lookups and
// set the next expected checkpoint accordingly.
b.checkpointNode = node
if i < numCheckpoints-1 {
b.nextCheckpoint = &checkpoints[i+1]
}
return b.checkpointNode, nil
}
// No known latest checkpoint. This will only happen on blocks
// before the first known checkpoint. So, set the next expected
// checkpoint to the first checkpoint and return the fact there
// is no latest known checkpoint block.
b.nextCheckpoint = &checkpoints[0]
return nil, nil
}
// At this point we've already searched for the latest known checkpoint,
// so when there is no next checkpoint, the current checkpoint lockin
// will always be the latest known checkpoint.
if b.nextCheckpoint == nil {
return b.checkpointNode, nil
}
// When there is a next checkpoint and the height of the current best
// chain does not exceed it, the current checkpoint lockin is still
// the latest known checkpoint.
if b.bestChain.Tip().height < b.nextCheckpoint.Height {
return b.checkpointNode, nil
}
// We've reached or exceeded the next checkpoint height. Note that
// once a checkpoint lockin has been reached, forks are prevented from
// any blocks before the checkpoint, so we don't have to worry about the
// checkpoint going away out from under us due to a chain reorganize.
// Cache the latest known checkpoint for future lookups. Note that if
// this lookup fails something is very wrong since the chain has already
// passed the checkpoint which was verified as accurate before inserting
// it.
checkpointNode := b.index.LookupNode(b.nextCheckpoint.Hash)
if checkpointNode == nil {
return nil, AssertError(fmt.Sprintf("findPreviousCheckpoint "+
"failed lookup of known good block node %s",
b.nextCheckpoint.Hash))
}
b.checkpointNode = checkpointNode
// Set the next expected checkpoint.
checkpointIndex := -1
for i := numCheckpoints - 1; i >= 0; i-- {
if checkpoints[i].Hash.IsEqual(b.nextCheckpoint.Hash) {
checkpointIndex = i
break
}
}
b.nextCheckpoint = nil
if checkpointIndex != -1 && checkpointIndex < numCheckpoints-1 {
b.nextCheckpoint = &checkpoints[checkpointIndex+1]
}
return b.checkpointNode, nil
}
// isNonstandardTransaction determines whether a transaction contains any
// scripts which are not one of the standard types.
func isNonstandardTransaction(tx *bchutil.Tx) bool {
// Check all of the output public key scripts for non-standard scripts.
for _, txOut := range tx.MsgTx().TxOut {
scriptClass := txscript.GetScriptClass(txOut.PkScript)
if scriptClass == txscript.NonStandardTy {
return true
}
}
return false
}
// IsCheckpointCandidate returns whether or not the passed block is a good
// checkpoint candidate.
//
// The factors used to determine a good checkpoint are:
// - The block must be in the main chain
// - The block must be at least 'CheckpointConfirmations' blocks prior to the
// current end of the main chain
// - The timestamps for the blocks before and after the checkpoint must have
// timestamps which are also before and after the checkpoint, respectively
// (due to the median time allowance this is not always the case)
// - The block must not contain any strange transaction such as those with
// nonstandard scripts
//
// The intent is that candidates are reviewed by a developer to make the final
// decision and then manually added to the list of checkpoints for a network.
//
// This function is safe for concurrent access.
func (b *BlockChain) IsCheckpointCandidate(block *bchutil.Block) (bool, error) {
b.chainLock.RLock()
defer b.chainLock.RUnlock()
// A checkpoint must be in the main chain.
node := b.index.LookupNode(block.Hash())
if node == nil || !b.bestChain.Contains(node) {
return false, nil
}
// Ensure the height of the passed block and the entry for the block in
// the main chain match. This should always be the case unless the
// caller provided an invalid block.
if node.height != block.Height() {
return false, fmt.Errorf("passed block height of %d does not "+
"match the main chain height of %d", block.Height(),
node.height)
}
// A checkpoint must be at least CheckpointConfirmations blocks
// before the end of the main chain.
mainChainHeight := b.bestChain.Tip().height
if node.height > (mainChainHeight - CheckpointConfirmations) {
return false, nil
}
	// A checkpoint must have at least one block after it.
//
// This should always succeed since the check above already made sure it
// is CheckpointConfirmations back, but be safe in case the constant
// changes.
nextNode := b.bestChain.Next(node)
if nextNode == nil {
return false, nil
}
	// A checkpoint must have at least one block before it.
if node.parent == nil {
return false, nil
}
// A checkpoint must have timestamps for the block and the blocks on
// either side of it in order (due to the median time allowance this is
// not always the case).
prevTime := time.Unix(node.parent.timestamp, 0)
curTime := block.MsgBlock().Header.Timestamp
nextTime := time.Unix(nextNode.timestamp, 0)
if prevTime.After(curTime) || nextTime.Before(curTime) {
return false, nil
}
// A checkpoint must have transactions that only contain standard
// scripts.
for _, tx := range block.Transactions() {
if isNonstandardTransaction(tx) {
return false, nil
}
}
// All of the checks passed, so the block is a candidate.
return true, nil
}
| {
hash, _ := chainhash.NewHashFromStr(hexStr)
return hash
} |
upgrade_legacy.go | package cmd
import (
"context"
"fmt"
"strings"
"github.com/golang/protobuf/ptypes"
"github.com/linkerd/linkerd2/cli/flag"
pb "github.com/linkerd/linkerd2/controller/gen/config"
charts "github.com/linkerd/linkerd2/pkg/charts/linkerd2"
"github.com/linkerd/linkerd2/pkg/healthcheck"
"github.com/linkerd/linkerd2/pkg/issuercerts"
"github.com/linkerd/linkerd2/pkg/k8s"
"github.com/linkerd/linkerd2/pkg/version"
"github.com/spf13/pflag"
corev1 "k8s.io/api/core/v1"
"k8s.io/client-go/kubernetes"
)
func loadStoredValuesLegacy(ctx context.Context, k *k8s.KubernetesAPI) (*charts.Values, error) {
// We fetch the configs directly from kubernetes because we need to be able
// to upgrade/reinstall the control plane when the API is not available; and
// this also serves as a passive check that we have privileges to access this
// control plane.
_, configs, err := healthcheck.FetchLinkerdConfigMap(ctx, k, controlPlaneNamespace)
if err != nil {
return nil, fmt.Errorf("could not fetch configs from kubernetes: %s", err)
}
if configs == nil {
return nil, nil
}
repairConfigs(configs)
values, err := charts.NewValues()
if err != nil {
return nil, err
}
allStageFlags, allStageFlagSet := makeAllStageFlags(values)
installFlags, installFlagSet := makeInstallFlags(values)
upgradeFlags, installUpgradeFlagSet, err := makeInstallUpgradeFlags(values)
if err != nil {
return nil, err
}
proxyFlags, proxyFlagSet := makeProxyFlags(values)
flagSet := pflag.NewFlagSet("loaded_flags", pflag.ExitOnError)
flagSet.AddFlagSet(allStageFlagSet)
flagSet.AddFlagSet(installFlagSet)
flagSet.AddFlagSet(installUpgradeFlagSet)
flagSet.AddFlagSet(proxyFlagSet)
setFlagsFromInstall(flagSet, configs.GetInstall().GetFlags())
flags := flattenFlags(allStageFlags, installFlags, upgradeFlags, proxyFlags)
err = flag.ApplySetFlags(values, flags)
if err != nil {
return nil, err
}
idctx := configs.GetGlobal().GetIdentityContext()
if idctx.GetTrustDomain() != "" && idctx.GetTrustAnchorsPem() != "" {
err = fetchIdentityValues(ctx, k, idctx, values)
if err != nil {
return nil, err
}
}
return values, nil
}
func setFlagsFromInstall(flags *pflag.FlagSet, installFlags []*pb.Install_Flag) {
for _, i := range installFlags {
if f := flags.Lookup(i.GetName()); f != nil && !f.Changed {
// The function recordFlags() stores the string representation of flags in the ConfigMap
// so a stringSlice is stored e.g. as [a,b].
			// To avoid having f.Value.Set() interpret that as a single string,
			// we need to remove the brackets.
value := i.GetValue()
if f.Value.Type() == "stringSlice" {
value = strings.Trim(value, "[]")
}
f.Value.Set(value)
f.Changed = true
}
}
}
func repairConfigs(configs *pb.All) {
// Repair the "install" section; install flags are updated separately
if configs.Install == nil {
configs.Install = &pb.Install{}
}
// ALWAYS update the CLI version to the most recent.
configs.Install.CliVersion = version.Version
// Repair the "proxy" section
if configs.Proxy == nil {
configs.Proxy = &pb.Proxy{}
}
if configs.Proxy.DebugImage == nil {
configs.Proxy.DebugImage = &pb.Image{}
}
if configs.GetProxy().GetDebugImage().GetImageName() == "" {
configs.Proxy.DebugImage.ImageName = k8s.DebugSidecarImage
}
if configs.GetProxy().GetDebugImageVersion() == "" {
configs.Proxy.DebugImageVersion = version.Version
}
}
// fetchIdentityValues checks the kubernetes API to fetch an existing
// linkerd identity configuration.
//
// This bypasses the public API so that we can access secrets and validate
// permissions.
func fetchIdentityValues(ctx context.Context, k kubernetes.Interface, idctx *pb.IdentityContext, values *charts.Values) error {
if idctx == nil {
return nil
}
if idctx.Scheme == "" {
// if this is empty, then we are upgrading from a version
// that did not support issuer schemes. Just default to the
// linkerd one.
idctx.Scheme = k8s.IdentityIssuerSchemeLinkerd
}
var trustAnchorsPEM string
var issuerData *issuercerts.IssuerCertData
var err error
trustAnchorsPEM = idctx.GetTrustAnchorsPem()
issuerData, err = fetchIssuer(ctx, k, trustAnchorsPEM, idctx.Scheme)
if err != nil {
return err
}
clockSkewDuration, err := ptypes.Duration(idctx.GetClockSkewAllowance())
if err != nil {
return fmt.Errorf("could not convert clock skew protobuf Duration format into golang Duration: %s", err)
}
issuanceLifetimeDuration, err := ptypes.Duration(idctx.GetIssuanceLifetime())
if err != nil {
return fmt.Errorf("could not convert issuance Lifetime protobuf Duration format into golang Duration: %s", err)
}
values.IdentityTrustAnchorsPEM = trustAnchorsPEM
values.Identity.Issuer.Scheme = idctx.Scheme
values.Identity.Issuer.ClockSkewAllowance = clockSkewDuration.String()
values.Identity.Issuer.IssuanceLifetime = issuanceLifetimeDuration.String()
values.Identity.Issuer.TLS.KeyPEM = issuerData.IssuerKey
values.Identity.Issuer.TLS.CrtPEM = issuerData.IssuerCrt
return nil
}
func | (ctx context.Context, k kubernetes.Interface, trustPEM string, scheme string) (*issuercerts.IssuerCertData, error) {
var (
issuerData *issuercerts.IssuerCertData
err error
)
switch scheme {
case string(corev1.SecretTypeTLS):
// Do not return external issuer certs as no need of storing them in config and upgrade secrets
// Also contradicts condition in https://github.com/linkerd/linkerd2/blob/main/cli/cmd/options.go#L550
return &issuercerts.IssuerCertData{}, nil
default:
issuerData, err = issuercerts.FetchIssuerData(ctx, k, trustPEM, controlPlaneNamespace)
if issuerData != nil && issuerData.TrustAnchors != trustPEM {
issuerData.TrustAnchors = trustPEM
}
}
if err != nil {
return nil, err
}
return issuerData, nil
}
| fetchIssuer |
image.rs | pub struct Edges {
top: bool,
bottom: bool,
left: bool,
right: bool,
}
#[derive(Debug, Clone)]
pub struct Image {
pub id: usize,
pub data: Vec< Vec<usize> >,
}
impl Image {
pub fn new() -> Self {
Image {
id: 0,
data: Vec::new(),
}
}
/// Print the image data row by row
pub fn print(&self) {
println!("\nTile {}:", self.id);
for line in &self.data {
for c in line {
match c {
0 => print!("."),
1 => print!("#"),
_ => print!("O"),
}
}
println!("");
}
}
    /// Get a row from the image; `r` can be a positive or negative integer.
    /// A negative integer returns a row offset from the end (i.e. -1 returns the last row).
///
/// # Arguments
///
/// * `r` the row to return
pub fn row(&self, r: i32) -> Option< Vec<usize> > {
        // Valid positive indices are 0..len and valid negative indices are
        // -len..=-1; `r == len` previously slipped past the guard and panicked.
        if r >= self.data.len() as i32 || r.abs() as usize > self.data.len() {
            return None
        }
if r >= 0 {
Some(self.data[r as usize].clone())
} else {
Some(self.data[self.data.len() - r.abs() as usize].clone())
}
}
    /// Get a column from the image; `c` can be a positive or negative integer.
    /// A negative integer returns a column offset from the end (i.e. -1 returns the last column).
///
/// # Arguments
///
    /// * `c` the column to return
pub fn col(&self, c: i32) -> Option< Vec<usize> > {
        // Same guard as `row`: `c == len` must be rejected, not just `|c| > len`.
        if c >= self.data[0].len() as i32 || c.abs() as usize > self.data[0].len() {
            return None
        }
if c >= 0 {
Some(self.data.iter().map(|v| v[c as usize]).collect::<Vec<usize>>())
} else {
Some(self.data.iter().map(|v| v[v.len() - c.abs() as usize]).collect::<Vec<usize>>())
}
}
/// Rotate the image through an angle (must be 90, 180 or 270 degrees)
///
/// # Arguments
///
/// * `angle` the angle to rotate through
pub fn rotate(&mut self, angle: u32) {
match angle {
90 => {
let mut new_data = Vec::new();
for i in 0..self.data[0].len() as i32 {
let mut row = self.col(i).unwrap();
row.reverse();
new_data.push(row);
}
self.data = new_data;
}
180 => {
let mut new_data = Vec::new();
for i in 0..self.data.len() as i32 {
let mut row = self.row(-i-1).unwrap();
row.reverse();
new_data.push(row);
}
self.data = new_data;
}
270 => {
let mut new_data = Vec::new();
for i in 0..self.data[0].len() as i32 {
let row = self.col(-i-1).unwrap();
new_data.push(row);
}
self.data = new_data;
}
_ => ()
};
}
/// Flips horizontally.
/// 1 1 1 1 1 1
/// i.e. 1 1 0 => 0 1 1
/// 1 0 0 0 0 1
pub fn flip_horizontal(&mut self) {
let mut new_data = Vec::new();
for i in 0..self.data.len() as i32 {
let mut new_row = self.row(i).unwrap();
new_row.reverse();
new_data.push(new_row);
}
self.data = new_data;
}
/// Flips vertically (i.e. rows)
/// 1 1 1 1 0 0
/// i.e. 1 1 0 => 1 1 0
    /// 1 0 0 1 1 1
pub fn flip_vertical(&mut self) {
let mut new_data = Vec::new();
for i in 0..self.data.len() as i32 {
let new_row = self.row(-i-1).unwrap();
new_data.push(new_row);
}
self.data = new_data;
}
/// Returns the data without the border
pub fn trim(&self, e: Edges) -> Vec<Vec<usize>> {
let mut picture = Vec::new();
let max = self.data.len() - 1;
        // Inclusive bound: the e.bottom/e.right checks below must be able to
        // see the last row and column (tiles are square, so one bound serves both).
        for r in 0..=max {
if (e.top && r == 0) || (e.bottom && r == max) {
continue;
}
let mut row = Vec::new();
            for c in 0..=max {
if (e.left && c == 0) || (e.right && c == max) {
continue;
}
row.push(self.data[r][c]);
}
picture.push(row);
}
picture
}
}
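// Small worked example (assumed test, not in the original source): rotating
// the 2x2 tile [[1, 1], [1, 0]] by 90 degrees clockwise turns columns into rows.
#[cfg(test)]
mod image_rotate_sketch {
    use super::Image;

    #[test]
    fn rotate_90() {
        let mut img = Image { id: 1, data: vec![vec![1, 1], vec![1, 0]] };
        img.rotate(90);
        assert_eq!(img.data, vec![vec![1, 1], vec![0, 1]]);
    }
}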
#[derive(Debug, Clone)]
pub struct ImageAssembler {
image_array: Vec<Vec<Image>>,
size: (usize, usize),
}
impl ImageAssembler {
pub fn new(x: usize, y: usize) -> Self {
ImageAssembler {
image_array: vec!(vec!(Image::new(); y); x),
size: (x, y),
}
}
    /// Insert an image at the given x,y position.
    /// Panics if the position falls outside the assembler's dimensions.
pub fn insert(&mut self, i: Image, (x, y): (usize, usize)) {
if x >= self.size.0 || y >= self.size.1 {
panic!("Index ({},{}) out of bounds ({},{})", x, y, self.size.0, self.size.1);
}
// Remove borders from the image
let copy = i.clone();
let edges_to_trim = Edges {
top: true,
bottom: true,
left: true,
right: true,
};
self.image_array[x][y] = Image {
id: copy.id,
data: copy.trim(edges_to_trim),
};
}
fn matching_edges(tile: &Image, others: &Vec<Image>) -> Edges {
let mut matches = Edges {
top: false,
bottom: false,
left: false,
right: false,
};
let self_rows_and_cols = vec!(tile.row(0), tile.row(-1), tile.col(0), tile.col(-1));
for other in others {
if tile.id == other.id {
continue;
}
let other_rows_and_cols = vec!(other.row(0), other.row(-1), other.col(0), other.col(-1));
for (i, v1) in self_rows_and_cols.iter().enumerate() {
                for v2 in other_rows_and_cols.iter() {
let mut temp = v2.clone().unwrap();
temp.reverse();
let v2_backwards = Some(temp);
if v1 == v2 || v1 == &v2_backwards {
match i {
0 => matches.top = true,
1 => matches.bottom = true,
2 => matches.left = true,
3 => matches.right = true,
_ => (),
}
}
}
}
}
matches
}
fn find_next_row(x: &Image, tiles: &Vec<Image>) -> Option<Image> {
// Column to match (last column)
let c = x.col(-1).unwrap();
for tile in tiles {
if tile.id == x.id {
continue;
}
let r1 = tile.row(0).unwrap();
let rl = tile.row(-1).unwrap();
let c1 = tile.col(0).unwrap();
let cl = tile.col(-1).unwrap();
            let r1b = r1.iter().rev().cloned().collect::<Vec<usize>>();
            let rlb = rl.iter().rev().cloned().collect::<Vec<usize>>();
            let c1b = c1.iter().rev().cloned().collect::<Vec<usize>>();
            let clb = cl.iter().rev().cloned().collect::<Vec<usize>>();
match c {
_ if c == r1 => {
let mut t = tile.clone();
t.rotate(270);
t.flip_vertical();
return Some(t)
}
_ if c == r1b => {
let mut t = tile.clone();
t.rotate(270);
return Some(t)
}
_ if c == rl => {
let mut t = tile.clone();
t.rotate(90);
return Some(t)
}
_ if c == rlb => {
let mut t = tile.clone();
t.rotate(90);
t.flip_vertical();
return Some(t)
}
_ if c == c1 => {
let t = tile.clone();
return Some(t)
}
_ if c == c1b => {
let mut t = tile.clone();
t.flip_vertical();
return Some(t)
}
_ if c == cl => {
let mut t = tile.clone();
t.flip_horizontal();
return Some(t)
}
_ if c == clb => {
let mut t = tile.clone();
t.rotate(180);
return Some(t)
}
_ => (),
}
}
None
}
fn find_next_column (x: &Image, tiles: &Vec<Image>) -> Option<Image> {
        // Row to match (last row)
let r = x.row(-1).unwrap();
for tile in tiles {
if tile.id == x.id {
continue;
}
let r1 = tile.row(0).unwrap();
let rl = tile.row(-1).unwrap();
let c1 = tile.col(0).unwrap();
let cl = tile.col(-1).unwrap();
            let r1b = r1.iter().rev().cloned().collect::<Vec<usize>>();
            let rlb = rl.iter().rev().cloned().collect::<Vec<usize>>();
            let c1b = c1.iter().rev().cloned().collect::<Vec<usize>>();
            let clb = cl.iter().rev().cloned().collect::<Vec<usize>>();
match r {
_ if r == r1 => {
let t = tile.clone();
return Some(t)
}
_ if r == r1b => {
let mut t = tile.clone();
t.flip_horizontal();
return Some(t)
}
_ if r == rl => {
let mut t = tile.clone();
t.flip_vertical();
return Some(t)
}
_ if r == rlb => {
let mut t = tile.clone();
t.rotate(180);
return Some(t)
}
_ if r == c1 => {
let mut t = tile.clone();
t.rotate(90);
t.flip_horizontal();
return Some(t)
}
_ if r == c1b => {
let mut t = tile.clone();
t.rotate(90);
return Some(t)
}
_ if r == cl => {
let mut t = tile.clone();
t.rotate(270);
return Some(t)
}
_ if r == clb => {
let mut t = tile.clone();
t.rotate(270);
t.flip_horizontal();
return Some(t)
}
_ => (),
}
}
None
}
fn collapse_image(&self) -> Image {
let mut output_image = Vec::new();
let mut row_offset = 0;
let mut r = 0;
let mut is_first = true;
for image_row in &self.image_array {
for image in image_row {
for (i, row) in image.data.iter().enumerate() {
if is_first { | output_image[row_offset + i].append(&mut row.clone());
}
}
is_first = false;
}
row_offset += r;
r = 0;
is_first = true;
}
Image {
id: 9999,
data: output_image,
}
}
pub fn assemble(&mut self, tiles: &Vec<Image>) -> Image {
let mut positions = Vec::new();
// First we need to find a corner tile
let mut start_tile: Image = Image::new();
for tile in tiles {
let edges = Self::matching_edges(tile, tiles);
match (edges.top, edges.right, edges.bottom, edges.left) {
(true, true, false, false) => { start_tile = tile.clone(); start_tile.rotate(90) } // Top and Right
(false, true, true, false) => { start_tile = tile.clone(); } // Bottom and Right
(false, false, true, true) => { start_tile = tile.clone(); start_tile.rotate(270)} // Bottom and Left
(true, false, false, true) => { start_tile = tile.clone(); start_tile.rotate(180)} // Top and Left
_ => (), // Not a corner
}
}
// Assuming that tiles won't ever have an id of 0
if start_tile.id == 0 {
panic!("No corner tiels found");
}
// Starting with the corner tile - we find all connected tiles in the row
// then we drop down to the next row
let mut left_column_tile = start_tile.clone();
let mut r = 0;
let mut c = 0;
loop {
let mut current_tile = left_column_tile.clone();
self.insert(current_tile.clone(), (r, c));
c += 1;
let mut row = vec!(current_tile.id);
while let Some(next_tile) = Self::find_next_row(¤t_tile, tiles) {
row.push(next_tile.id);
self.insert(next_tile.clone(), (r, c));
current_tile = next_tile;
                c += 1; // Increment the column
}
positions.push(row);
r += 1; // Increment the row
c = 0;
if let Some(t) = Self::find_next_column(&left_column_tile, tiles) {
left_column_tile = t.clone();
} else {
break;
}
}
// for row in &positions {
// println!("{:?}", row);
// }
self.collapse_image()
}
} | output_image.push(row.clone());
r += 1;
} else { |
allowed_flex_volume.rs | // Generated from definition io.k8s.api.policy.v1beta1.AllowedFlexVolume
/// AllowedFlexVolume represents a single Flexvolume that is allowed to be used.
#[derive(Clone, Debug, Default, PartialEq)]
pub struct AllowedFlexVolume {
/// driver is the name of the Flexvolume driver.
pub driver: String,
}
impl<'de> crate::serde::Deserialize<'de> for AllowedFlexVolume {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: crate::serde::Deserializer<'de> {
#[allow(non_camel_case_types)]
enum Field {
Key_driver,
Other,
}
impl<'de> crate::serde::Deserialize<'de> for Field {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: crate::serde::Deserializer<'de> {
struct Visitor;
impl<'de> crate::serde::de::Visitor<'de> for Visitor {
type Value = Field;
fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str("field identifier")
}
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E> where E: crate::serde::de::Error {
Ok(match v {
"driver" => Field::Key_driver,
_ => Field::Other,
})
}
}
deserializer.deserialize_identifier(Visitor)
}
}
struct Visitor;
impl<'de> crate::serde::de::Visitor<'de> for Visitor {
type Value = AllowedFlexVolume;
fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str("AllowedFlexVolume")
}
fn | <A>(self, mut map: A) -> Result<Self::Value, A::Error> where A: crate::serde::de::MapAccess<'de> {
let mut value_driver: Option<String> = None;
while let Some(key) = crate::serde::de::MapAccess::next_key::<Field>(&mut map)? {
match key {
Field::Key_driver => value_driver = crate::serde::de::MapAccess::next_value(&mut map)?,
Field::Other => { let _: crate::serde::de::IgnoredAny = crate::serde::de::MapAccess::next_value(&mut map)?; },
}
}
Ok(AllowedFlexVolume {
driver: value_driver.unwrap_or_default(),
})
}
}
deserializer.deserialize_struct(
"AllowedFlexVolume",
&[
"driver",
],
Visitor,
)
}
}
impl crate::serde::Serialize for AllowedFlexVolume {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: crate::serde::Serializer {
let mut state = serializer.serialize_struct(
"AllowedFlexVolume",
1,
)?;
crate::serde::ser::SerializeStruct::serialize_field(&mut state, "driver", &self.driver)?;
crate::serde::ser::SerializeStruct::end(state)
}
}
#[cfg(feature = "schemars")]
impl crate::schemars::JsonSchema for AllowedFlexVolume {
fn schema_name() -> String {
"io.k8s.api.policy.v1beta1.AllowedFlexVolume".to_owned()
}
fn json_schema(__gen: &mut crate::schemars::gen::SchemaGenerator) -> crate::schemars::schema::Schema {
crate::schemars::schema::Schema::Object(crate::schemars::schema::SchemaObject {
metadata: Some(Box::new(crate::schemars::schema::Metadata {
description: Some("AllowedFlexVolume represents a single Flexvolume that is allowed to be used.".to_owned()),
..Default::default()
})),
instance_type: Some(crate::schemars::schema::SingleOrVec::Single(Box::new(crate::schemars::schema::InstanceType::Object))),
object: Some(Box::new(crate::schemars::schema::ObjectValidation {
properties: IntoIterator::into_iter([
(
"driver".to_owned(),
crate::schemars::schema::Schema::Object(crate::schemars::schema::SchemaObject {
metadata: Some(Box::new(crate::schemars::schema::Metadata {
description: Some("driver is the name of the Flexvolume driver.".to_owned()),
..Default::default()
})),
instance_type: Some(crate::schemars::schema::SingleOrVec::Single(Box::new(crate::schemars::schema::InstanceType::String))),
..Default::default()
}),
),
]).collect(),
required: IntoIterator::into_iter([
"driver",
]).map(std::borrow::ToOwned::to_owned).collect(),
..Default::default()
})),
..Default::default()
})
}
}
| visit_map |
proc_macro_server.rs | use crate::base::ExtCtxt;
use rustc_parse::{nt_to_tokenstream, parse_stream_from_source_str};
use syntax::ast;
use syntax::print::pprust;
use syntax::sess::ParseSess;
use syntax::token;
use syntax::tokenstream::{self, DelimSpan, IsJoint::*, TokenStream, TreeAndJoint};
use syntax::util::comments;
use syntax_pos::symbol::{kw, sym, Symbol};
use syntax_pos::{BytePos, FileName, MultiSpan, Pos, SourceFile, Span};
use errors::Diagnostic;
use rustc_data_structures::sync::Lrc;
use pm::bridge::{server, TokenTree};
use pm::{Delimiter, Level, LineColumn, Spacing};
use std::ops::Bound;
use std::{ascii, panic};
trait FromInternal<T> {
fn from_internal(x: T) -> Self;
}
trait ToInternal<T> {
fn to_internal(self) -> T;
}
impl FromInternal<token::DelimToken> for Delimiter {
fn | (delim: token::DelimToken) -> Delimiter {
match delim {
token::Paren => Delimiter::Parenthesis,
token::Brace => Delimiter::Brace,
token::Bracket => Delimiter::Bracket,
token::NoDelim => Delimiter::None,
}
}
}
impl ToInternal<token::DelimToken> for Delimiter {
fn to_internal(self) -> token::DelimToken {
match self {
Delimiter::Parenthesis => token::Paren,
Delimiter::Brace => token::Brace,
Delimiter::Bracket => token::Bracket,
Delimiter::None => token::NoDelim,
}
}
}
impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
for TokenTree<Group, Punct, Ident, Literal>
{
fn from_internal(
((tree, is_joint), sess, stack): (TreeAndJoint, &ParseSess, &mut Vec<Self>),
) -> Self {
use syntax::token::*;
let joint = is_joint == Joint;
let Token { kind, span } = match tree {
tokenstream::TokenTree::Delimited(span, delim, tts) => {
let delimiter = Delimiter::from_internal(delim);
return TokenTree::Group(Group { delimiter, stream: tts.into(), span });
}
tokenstream::TokenTree::Token(token) => token,
};
macro_rules! tt {
($ty:ident { $($field:ident $(: $value:expr)*),+ $(,)? }) => (
TokenTree::$ty(self::$ty {
$($field $(: $value)*,)+
span,
})
);
($ty:ident::$method:ident($($value:expr),*)) => (
TokenTree::$ty(self::$ty::$method($($value,)* span))
);
}
macro_rules! op {
($a:expr) => {
tt!(Punct::new($a, joint))
};
($a:expr, $b:expr) => {{
stack.push(tt!(Punct::new($b, joint)));
tt!(Punct::new($a, true))
}};
($a:expr, $b:expr, $c:expr) => {{
stack.push(tt!(Punct::new($c, joint)));
stack.push(tt!(Punct::new($b, true)));
tt!(Punct::new($a, true))
}};
}
match kind {
Eq => op!('='),
Lt => op!('<'),
Le => op!('<', '='),
EqEq => op!('=', '='),
Ne => op!('!', '='),
Ge => op!('>', '='),
Gt => op!('>'),
AndAnd => op!('&', '&'),
OrOr => op!('|', '|'),
Not => op!('!'),
Tilde => op!('~'),
BinOp(Plus) => op!('+'),
BinOp(Minus) => op!('-'),
BinOp(Star) => op!('*'),
BinOp(Slash) => op!('/'),
BinOp(Percent) => op!('%'),
BinOp(Caret) => op!('^'),
BinOp(And) => op!('&'),
BinOp(Or) => op!('|'),
BinOp(Shl) => op!('<', '<'),
BinOp(Shr) => op!('>', '>'),
BinOpEq(Plus) => op!('+', '='),
BinOpEq(Minus) => op!('-', '='),
BinOpEq(Star) => op!('*', '='),
BinOpEq(Slash) => op!('/', '='),
BinOpEq(Percent) => op!('%', '='),
BinOpEq(Caret) => op!('^', '='),
BinOpEq(And) => op!('&', '='),
BinOpEq(Or) => op!('|', '='),
BinOpEq(Shl) => op!('<', '<', '='),
BinOpEq(Shr) => op!('>', '>', '='),
At => op!('@'),
Dot => op!('.'),
DotDot => op!('.', '.'),
DotDotDot => op!('.', '.', '.'),
DotDotEq => op!('.', '.', '='),
Comma => op!(','),
Semi => op!(';'),
Colon => op!(':'),
ModSep => op!(':', ':'),
RArrow => op!('-', '>'),
LArrow => op!('<', '-'),
FatArrow => op!('=', '>'),
Pound => op!('#'),
Dollar => op!('$'),
Question => op!('?'),
SingleQuote => op!('\''),
Ident(name, false) if name == kw::DollarCrate => tt!(Ident::dollar_crate()),
Ident(name, is_raw) => tt!(Ident::new(name, is_raw)),
Lifetime(name) => {
let ident = ast::Ident::new(name, span).without_first_quote();
stack.push(tt!(Ident::new(ident.name, false)));
tt!(Punct::new('\'', true))
}
Literal(lit) => tt!(Literal { lit }),
DocComment(c) => {
let style = comments::doc_comment_style(&c.as_str());
let stripped = comments::strip_doc_comment_decoration(&c.as_str());
let mut escaped = String::new();
for ch in stripped.chars() {
escaped.extend(ch.escape_debug());
}
let stream = vec![
Ident(sym::doc, false),
Eq,
TokenKind::lit(token::Str, Symbol::intern(&escaped), None),
]
.into_iter()
.map(|kind| tokenstream::TokenTree::token(kind, span))
.collect();
stack.push(TokenTree::Group(Group {
delimiter: Delimiter::Bracket,
stream,
span: DelimSpan::from_single(span),
}));
if style == ast::AttrStyle::Inner {
stack.push(tt!(Punct::new('!', false)));
}
tt!(Punct::new('#', false))
}
Interpolated(nt) => {
let stream = nt_to_tokenstream(&nt, sess, span);
TokenTree::Group(Group {
delimiter: Delimiter::None,
stream,
span: DelimSpan::from_single(span),
})
}
OpenDelim(..) | CloseDelim(..) => unreachable!(),
Whitespace | Comment | Shebang(..) | Unknown(..) | Eof => unreachable!(),
}
}
}
impl ToInternal<TokenStream> for TokenTree<Group, Punct, Ident, Literal> {
fn to_internal(self) -> TokenStream {
use syntax::token::*;
let (ch, joint, span) = match self {
TokenTree::Punct(Punct { ch, joint, span }) => (ch, joint, span),
TokenTree::Group(Group { delimiter, stream, span }) => {
return tokenstream::TokenTree::Delimited(
span,
delimiter.to_internal(),
stream.into(),
)
.into();
}
TokenTree::Ident(self::Ident { sym, is_raw, span }) => {
return tokenstream::TokenTree::token(Ident(sym, is_raw), span).into();
}
TokenTree::Literal(self::Literal {
lit: token::Lit { kind: token::Integer, symbol, suffix },
span,
}) if symbol.as_str().starts_with("-") => {
let minus = BinOp(BinOpToken::Minus);
let symbol = Symbol::intern(&symbol.as_str()[1..]);
let integer = TokenKind::lit(token::Integer, symbol, suffix);
let a = tokenstream::TokenTree::token(minus, span);
let b = tokenstream::TokenTree::token(integer, span);
return vec![a, b].into_iter().collect();
}
TokenTree::Literal(self::Literal {
lit: token::Lit { kind: token::Float, symbol, suffix },
span,
}) if symbol.as_str().starts_with("-") => {
let minus = BinOp(BinOpToken::Minus);
let symbol = Symbol::intern(&symbol.as_str()[1..]);
let float = TokenKind::lit(token::Float, symbol, suffix);
let a = tokenstream::TokenTree::token(minus, span);
let b = tokenstream::TokenTree::token(float, span);
return vec![a, b].into_iter().collect();
}
TokenTree::Literal(self::Literal { lit, span }) => {
return tokenstream::TokenTree::token(Literal(lit), span).into();
}
};
let kind = match ch {
'=' => Eq,
'<' => Lt,
'>' => Gt,
'!' => Not,
'~' => Tilde,
'+' => BinOp(Plus),
'-' => BinOp(Minus),
'*' => BinOp(Star),
'/' => BinOp(Slash),
'%' => BinOp(Percent),
'^' => BinOp(Caret),
'&' => BinOp(And),
'|' => BinOp(Or),
'@' => At,
'.' => Dot,
',' => Comma,
';' => Semi,
':' => Colon,
'#' => Pound,
'$' => Dollar,
'?' => Question,
'\'' => SingleQuote,
_ => unreachable!(),
};
let tree = tokenstream::TokenTree::token(kind, span);
TokenStream::new(vec![(tree, if joint { Joint } else { NonJoint })])
}
}
impl ToInternal<errors::Level> for Level {
fn to_internal(self) -> errors::Level {
match self {
Level::Error => errors::Level::Error,
Level::Warning => errors::Level::Warning,
Level::Note => errors::Level::Note,
Level::Help => errors::Level::Help,
_ => unreachable!("unknown proc_macro::Level variant: {:?}", self),
}
}
}
#[derive(Clone)]
pub struct TokenStreamIter {
cursor: tokenstream::Cursor,
stack: Vec<TokenTree<Group, Punct, Ident, Literal>>,
}
#[derive(Clone)]
pub struct Group {
delimiter: Delimiter,
stream: TokenStream,
span: DelimSpan,
}
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
pub struct Punct {
ch: char,
// NB. not using `Spacing` here because it doesn't implement `Hash`.
joint: bool,
span: Span,
}
impl Punct {
fn new(ch: char, joint: bool, span: Span) -> Punct {
const LEGAL_CHARS: &[char] = &[
'=', '<', '>', '!', '~', '+', '-', '*', '/', '%', '^', '&', '|', '@', '.', ',', ';',
':', '#', '$', '?', '\'',
];
if !LEGAL_CHARS.contains(&ch) {
panic!("unsupported character `{:?}`", ch)
}
Punct { ch, joint, span }
}
}
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
pub struct Ident {
sym: Symbol,
is_raw: bool,
span: Span,
}
impl Ident {
fn is_valid(string: &str) -> bool {
let mut chars = string.chars();
if let Some(start) = chars.next() {
rustc_lexer::is_id_start(start) && chars.all(rustc_lexer::is_id_continue)
} else {
false
}
}
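    // For example, `is_valid("foo_1")` holds, while `is_valid("1foo")` and
    // `is_valid("")` do not: the first character must be an identifier start.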
fn new(sym: Symbol, is_raw: bool, span: Span) -> Ident {
let string = sym.as_str();
if !Self::is_valid(&string) {
panic!("`{:?}` is not a valid identifier", string)
}
if is_raw && !sym.can_be_raw() {
panic!("`{}` cannot be a raw identifier", string);
}
Ident { sym, is_raw, span }
}
fn dollar_crate(span: Span) -> Ident {
// `$crate` is accepted as an ident only if it comes from the compiler.
Ident { sym: kw::DollarCrate, is_raw: false, span }
}
}
// FIXME(eddyb) `Literal` should not expose internal `Debug` impls.
#[derive(Clone, Debug)]
pub struct Literal {
lit: token::Lit,
span: Span,
}
pub(crate) struct Rustc<'a> {
sess: &'a ParseSess,
def_site: Span,
call_site: Span,
mixed_site: Span,
}
impl<'a> Rustc<'a> {
pub fn new(cx: &'a ExtCtxt<'_>) -> Self {
let expn_data = cx.current_expansion.id.expn_data();
Rustc {
sess: cx.parse_sess,
def_site: cx.with_def_site_ctxt(expn_data.def_site),
call_site: cx.with_call_site_ctxt(expn_data.call_site),
mixed_site: cx.with_mixed_site_ctxt(expn_data.call_site),
}
}
fn lit(&mut self, kind: token::LitKind, symbol: Symbol, suffix: Option<Symbol>) -> Literal {
Literal { lit: token::Lit::new(kind, symbol, suffix), span: server::Span::call_site(self) }
}
}
impl server::Types for Rustc<'_> {
type TokenStream = TokenStream;
type TokenStreamBuilder = tokenstream::TokenStreamBuilder;
type TokenStreamIter = TokenStreamIter;
type Group = Group;
type Punct = Punct;
type Ident = Ident;
type Literal = Literal;
type SourceFile = Lrc<SourceFile>;
type MultiSpan = Vec<Span>;
type Diagnostic = Diagnostic;
type Span = Span;
}
impl server::TokenStream for Rustc<'_> {
fn new(&mut self) -> Self::TokenStream {
TokenStream::default()
}
fn is_empty(&mut self, stream: &Self::TokenStream) -> bool {
stream.is_empty()
}
fn from_str(&mut self, src: &str) -> Self::TokenStream {
parse_stream_from_source_str(
FileName::proc_macro_source_code(src),
src.to_string(),
self.sess,
Some(self.call_site),
)
}
fn to_string(&mut self, stream: &Self::TokenStream) -> String {
pprust::tts_to_string(stream.clone())
}
fn from_token_tree(
&mut self,
tree: TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>,
) -> Self::TokenStream {
tree.to_internal()
}
fn into_iter(&mut self, stream: Self::TokenStream) -> Self::TokenStreamIter {
TokenStreamIter { cursor: stream.trees(), stack: vec![] }
}
}
impl server::TokenStreamBuilder for Rustc<'_> {
fn new(&mut self) -> Self::TokenStreamBuilder {
tokenstream::TokenStreamBuilder::new()
}
fn push(&mut self, builder: &mut Self::TokenStreamBuilder, stream: Self::TokenStream) {
builder.push(stream);
}
fn build(&mut self, builder: Self::TokenStreamBuilder) -> Self::TokenStream {
builder.build()
}
}
impl server::TokenStreamIter for Rustc<'_> {
fn next(
&mut self,
iter: &mut Self::TokenStreamIter,
) -> Option<TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>> {
loop {
let tree = iter.stack.pop().or_else(|| {
let next = iter.cursor.next_with_joint()?;
Some(TokenTree::from_internal((next, self.sess, &mut iter.stack)))
})?;
// HACK: The condition "dummy span + group with empty delimiter" represents an AST
// fragment approximately converted into a token stream. This may happen, for
// example, with inputs to proc macro attributes, including derives. Such "groups"
            // need to be flattened during iteration over stream's token trees.
// Eventually this needs to be removed in favor of keeping original token trees
// and not doing the roundtrip through AST.
if let TokenTree::Group(ref group) = tree {
if group.delimiter == Delimiter::None && group.span.entire().is_dummy() {
iter.cursor.append(group.stream.clone());
continue;
}
}
return Some(tree);
}
}
}
impl server::Group for Rustc<'_> {
fn new(&mut self, delimiter: Delimiter, stream: Self::TokenStream) -> Self::Group {
Group { delimiter, stream, span: DelimSpan::from_single(server::Span::call_site(self)) }
}
fn delimiter(&mut self, group: &Self::Group) -> Delimiter {
group.delimiter
}
fn stream(&mut self, group: &Self::Group) -> Self::TokenStream {
group.stream.clone()
}
fn span(&mut self, group: &Self::Group) -> Self::Span {
group.span.entire()
}
fn span_open(&mut self, group: &Self::Group) -> Self::Span {
group.span.open
}
fn span_close(&mut self, group: &Self::Group) -> Self::Span {
group.span.close
}
fn set_span(&mut self, group: &mut Self::Group, span: Self::Span) {
group.span = DelimSpan::from_single(span);
}
}
impl server::Punct for Rustc<'_> {
fn new(&mut self, ch: char, spacing: Spacing) -> Self::Punct {
Punct::new(ch, spacing == Spacing::Joint, server::Span::call_site(self))
}
fn as_char(&mut self, punct: Self::Punct) -> char {
punct.ch
}
fn spacing(&mut self, punct: Self::Punct) -> Spacing {
if punct.joint { Spacing::Joint } else { Spacing::Alone }
}
fn span(&mut self, punct: Self::Punct) -> Self::Span {
punct.span
}
fn with_span(&mut self, punct: Self::Punct, span: Self::Span) -> Self::Punct {
Punct { span, ..punct }
}
}
impl server::Ident for Rustc<'_> {
fn new(&mut self, string: &str, span: Self::Span, is_raw: bool) -> Self::Ident {
Ident::new(Symbol::intern(string), is_raw, span)
}
fn span(&mut self, ident: Self::Ident) -> Self::Span {
ident.span
}
fn with_span(&mut self, ident: Self::Ident, span: Self::Span) -> Self::Ident {
Ident { span, ..ident }
}
}
impl server::Literal for Rustc<'_> {
// FIXME(eddyb) `Literal` should not expose internal `Debug` impls.
fn debug(&mut self, literal: &Self::Literal) -> String {
format!("{:?}", literal)
}
fn integer(&mut self, n: &str) -> Self::Literal {
self.lit(token::Integer, Symbol::intern(n), None)
}
fn typed_integer(&mut self, n: &str, kind: &str) -> Self::Literal {
self.lit(token::Integer, Symbol::intern(n), Some(Symbol::intern(kind)))
}
fn float(&mut self, n: &str) -> Self::Literal {
self.lit(token::Float, Symbol::intern(n), None)
}
fn f32(&mut self, n: &str) -> Self::Literal {
self.lit(token::Float, Symbol::intern(n), Some(sym::f32))
}
fn f64(&mut self, n: &str) -> Self::Literal {
self.lit(token::Float, Symbol::intern(n), Some(sym::f64))
}
fn string(&mut self, string: &str) -> Self::Literal {
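        // Escape each character (Debug-style escapes) so the literal's symbol holds valid string-literal text.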
let mut escaped = String::new();
for ch in string.chars() {
escaped.extend(ch.escape_debug());
}
self.lit(token::Str, Symbol::intern(&escaped), None)
}
fn character(&mut self, ch: char) -> Self::Literal {
let mut escaped = String::new();
escaped.extend(ch.escape_unicode());
self.lit(token::Char, Symbol::intern(&escaped), None)
}
fn byte_string(&mut self, bytes: &[u8]) -> Self::Literal {
let string = bytes
.iter()
.cloned()
.flat_map(ascii::escape_default)
.map(Into::<char>::into)
.collect::<String>();
self.lit(token::ByteStr, Symbol::intern(&string), None)
}
fn span(&mut self, literal: &Self::Literal) -> Self::Span {
literal.span
}
fn set_span(&mut self, literal: &mut Self::Literal, span: Self::Span) {
literal.span = span;
}
fn subspan(
&mut self,
literal: &Self::Literal,
start: Bound<usize>,
end: Bound<usize>,
) -> Option<Self::Span> {
let span = literal.span;
let length = span.hi().to_usize() - span.lo().to_usize();
let start = match start {
Bound::Included(lo) => lo,
Bound::Excluded(lo) => lo + 1,
Bound::Unbounded => 0,
};
let end = match end {
Bound::Included(hi) => hi + 1,
Bound::Excluded(hi) => hi,
Bound::Unbounded => length,
};
// Bounds check the values, preventing addition overflow and OOB spans.
if start > u32::max_value() as usize
|| end > u32::max_value() as usize
|| (u32::max_value() - start as u32) < span.lo().to_u32()
|| (u32::max_value() - end as u32) < span.lo().to_u32()
|| start >= end
|| end > length
{
return None;
}
let new_lo = span.lo() + BytePos::from_usize(start);
let new_hi = span.lo() + BytePos::from_usize(end);
Some(span.with_lo(new_lo).with_hi(new_hi))
}
}
impl server::SourceFile for Rustc<'_> {
fn eq(&mut self, file1: &Self::SourceFile, file2: &Self::SourceFile) -> bool {
Lrc::ptr_eq(file1, file2)
}
fn path(&mut self, file: &Self::SourceFile) -> String {
match file.name {
FileName::Real(ref path) => path
.to_str()
.expect("non-UTF8 file path in `proc_macro::SourceFile::path`")
.to_string(),
_ => file.name.to_string(),
}
}
fn is_real(&mut self, file: &Self::SourceFile) -> bool {
file.is_real_file()
}
}
impl server::MultiSpan for Rustc<'_> {
fn new(&mut self) -> Self::MultiSpan {
vec![]
}
fn push(&mut self, spans: &mut Self::MultiSpan, span: Self::Span) {
spans.push(span)
}
}
impl server::Diagnostic for Rustc<'_> {
fn new(&mut self, level: Level, msg: &str, spans: Self::MultiSpan) -> Self::Diagnostic {
let mut diag = Diagnostic::new(level.to_internal(), msg);
diag.set_span(MultiSpan::from_spans(spans));
diag
}
fn sub(
&mut self,
diag: &mut Self::Diagnostic,
level: Level,
msg: &str,
spans: Self::MultiSpan,
) {
diag.sub(level.to_internal(), msg, MultiSpan::from_spans(spans), None);
}
fn emit(&mut self, diag: Self::Diagnostic) {
self.sess.span_diagnostic.emit_diagnostic(&diag);
}
}
impl server::Span for Rustc<'_> {
fn debug(&mut self, span: Self::Span) -> String {
format!("{:?} bytes({}..{})", span.ctxt(), span.lo().0, span.hi().0)
}
fn def_site(&mut self) -> Self::Span {
self.def_site
}
fn call_site(&mut self) -> Self::Span {
self.call_site
}
fn mixed_site(&mut self) -> Self::Span {
self.mixed_site
}
fn source_file(&mut self, span: Self::Span) -> Self::SourceFile {
self.sess.source_map().lookup_char_pos(span.lo()).file
}
fn parent(&mut self, span: Self::Span) -> Option<Self::Span> {
span.parent()
}
fn source(&mut self, span: Self::Span) -> Self::Span {
span.source_callsite()
}
fn start(&mut self, span: Self::Span) -> LineColumn {
let loc = self.sess.source_map().lookup_char_pos(span.lo());
LineColumn { line: loc.line, column: loc.col.to_usize() }
}
fn end(&mut self, span: Self::Span) -> LineColumn {
let loc = self.sess.source_map().lookup_char_pos(span.hi());
LineColumn { line: loc.line, column: loc.col.to_usize() }
}
fn join(&mut self, first: Self::Span, second: Self::Span) -> Option<Self::Span> {
let self_loc = self.sess.source_map().lookup_char_pos(first.lo());
let other_loc = self.sess.source_map().lookup_char_pos(second.lo());
if self_loc.file.name != other_loc.file.name {
return None;
}
Some(first.to(second))
}
fn resolved_at(&mut self, span: Self::Span, at: Self::Span) -> Self::Span {
span.with_ctxt(at.ctxt())
}
fn source_text(&mut self, span: Self::Span) -> Option<String> {
self.sess.source_map().span_to_snippet(span).ok()
}
}
| from_internal |
variant-price-detail.component.ts | import { ChangeDetectionStrategy, Component, Input, OnChanges, OnInit, SimpleChanges } from '@angular/core';
import { BehaviorSubject, combineLatest, Observable, Subject } from 'rxjs';
import { map } from 'rxjs/operators';
import { DataService } from '@vendure/admin-ui/core';
@Component({
selector: 'vdr-variant-price-detail',
templateUrl: './variant-price-detail.component.html',
styleUrls: ['./variant-price-detail.component.scss'],
changeDetection: ChangeDetectionStrategy.OnPush,
})
export class VariantPriceDetailComponent implements OnInit, OnChanges { | @Input() taxCategoryId: string;
grossPrice$: Observable<number>;
taxRate$: Observable<number>;
private priceChange$ = new BehaviorSubject<number>(0);
private taxCategoryIdChange$ = new BehaviorSubject<string>('');
constructor(private dataService: DataService) {}
ngOnInit() {
const taxRates$ = this.dataService.settings
.getTaxRates(99999, 0, 'cache-first')
.mapStream(data => data.taxRates.items);
const activeChannel$ = this.dataService.settings
.getActiveChannel('cache-first')
.mapStream(data => data.activeChannel);
this.taxRate$ = combineLatest(activeChannel$, taxRates$, this.taxCategoryIdChange$).pipe(
map(([channel, taxRates, taxCategoryId]) => {
const defaultTaxZone = channel.defaultTaxZone;
if (!defaultTaxZone) {
return 0;
}
const applicableRate = taxRates.find(
taxRate => taxRate.zone.id === defaultTaxZone.id && taxRate.category.id === taxCategoryId,
);
if (!applicableRate) {
return 0;
}
return applicableRate.value;
}),
);
this.grossPrice$ = combineLatest(this.taxRate$, this.priceChange$).pipe(
map(([taxRate, price]) => {
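                // Assuming price is held in minor units (e.g. cents): apply the tax rate, round, then convert to major units.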
return Math.round(price * ((100 + taxRate) / 100)) / 100;
}),
);
}
ngOnChanges(changes: SimpleChanges): void {
if ('price' in changes) {
this.priceChange$.next(changes.price.currentValue);
}
if ('taxCategoryId' in changes) {
this.taxCategoryIdChange$.next(changes.taxCategoryId.currentValue);
}
}
} | @Input() priceIncludesTax: boolean;
@Input() price: number;
@Input() currencyCode: string; |
file_loop.rs | use crate::appender::{FastLogRecord, LogAppender};
use crate::consts::LogSize;
use crate::plugin::file_split::{FileSplitAppender, RollingType};
use crate::plugin::packer::LogPacker;
/// Single logs are stored in rolling mode by capacity
pub struct FileLoopAppender {
file: FileSplitAppender,
}
impl FileLoopAppender {
pub fn new(log_file_path: &str, max_temp_size: LogSize) -> FileLoopAppender |
}
impl LogAppender for FileLoopAppender {
fn do_logs(&self, records: &[FastLogRecord]) {
self.file.do_logs(records);
}
fn flush(&self) {
self.file.flush();
}
}
| {
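        // KeepNum(1) keeps a single rolled file, so the log rolls over in place once max_temp_size is reached.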
Self {
file: FileSplitAppender::new(log_file_path, max_temp_size, RollingType::KeepNum(1), Box::new(LogPacker {}))
}
} |
ping.py | # Copyright (C) 2019 The Raphielscape Company LLC.
#
# Licensed under the Raphielscape Public License, Version 1.d (the "License");
# you may not use this file except in compliance with the License.
#
# ReCode by @mrismanaziz
# FROM Man-Userbot <https://github.com/mrismanaziz/Man-Userbot>
# t.me/SharingUserbot & t.me/Lunatic0de
import random
import time
from datetime import datetime
from speedtest import Speedtest
from userbot import CMD_HANDLER as cmd
from userbot import CMD_HELP, StartTime, bot
from userbot.events import man_cmd, register
from userbot.utils import humanbytes
absen = [
"**Hadir bang** 😁",
"**Hadir kak** 😉",
"**Hadir dong** 😁",
"**Hadir ganteng** 🥵",
"**Hadir bro** 😎",
"**Hadir kak maap telat** 🥺",
]
async def get_readable_time(seconds: int) -> str:
count = 0
up_time = ""
time_list = []
time_suffix_list = ["s", "m", "Jam", "Hari"]
while count < 4:
count += 1
remainder, result = divmod(seconds, 60) if count < 3 else divmod(seconds, 24)
if seconds == 0 and remainder == 0:
break
time_list.append(int(result))
seconds = int(remainder)
for x in range(len(time_list)):
time_list[x] = str(time_list[x]) + time_suffix_list[x]
if len(time_list) == 4:
up_time += time_list.pop() + ", "
time_list.reverse()
up_time += ":".join(time_list)
return up_time
@bot.on(man_cmd(outgoing=True, pattern=r"ping$"))
async def _(ping):
"""For .ping command, ping the userbot from any chat."""
uptime = await get_readable_time((time.time() - StartTime))
start = datetime.now()
await ping.edit("**✣**")
await ping.edit("**✣✣**")
await ping.edit("**✣✣✣**")
await ping.edit("**✣✣✣✣**")
end = datetime.now()
duration = (end - start).microseconds / 1000
user = await bot.get_me()
await ping.edit(
f"**PONG!!🏓**\n"
f"✣ **Pinger** - `%sms`\n"
f"✣ **Uptime -** `{uptime}` \n"
f"**✦҈͜͡Owner :** [{user.first_name}](tg://user?id={user.id})" % (duration)
)
@bot.on(man_cmd(outgoing=True, pattern=r"xping$"))
async def _(ping):
"""For .ping command, ping the userbot from any chat."""
uptime = await get_readable_time((time.time() - StartTime))
start = datetime.now()
await ping.edit("`Pinging....`")
end = datetime.now()
duration = (end - start).microseconds / 1000
await ping.edit(
f"**PONG!! 🍭**\n**Pinger** : %sms\n**Bot Uptime** : {uptime}🕛" % (duration)
)
@bot.on(man_cmd(outgoing=True, pattern=r"lping$"))
async def _(ping):
"""For .ping command, ping the userbot from any chat."""
uptime = await get_readable_time((time.time() - StartTime))
start = datetime.now()
await ping.edit("**★ PING ★**")
await ping.edit("**★★ PING ★★**")
await ping.edit("**★★★ PING ★★★**")
await ping.edit("**★★★★ PING ★★★★**")
await ping.edit("**✦҈͜͡➳ PONG!**")
end = datetime.now()
duration = (end - start).microseconds / 1000
user = await bot.get_me()
await ping.edit(
f"❃ **Ping !!** "
f"`%sms` \n"
f"❃ **Uptime -** "
f"`{uptime}` \n"
f"**✦҈͜͡➳ Master :** [{user.first_name}](tg://user?id={user.id})" % (duration)
)
@bot.on(man_cmd(outgoing=True, pattern=r"fping$"))
async def _(f):
"""For .ping command, ping the userbot from any chat."""
    uptime = await get_readable_time((time.time() - StartTime))
start = datetime.now()
await f.edit(". /¯ )")
await f.edit(". /¯ )\n /¯ /")
await f.edit(
". /¯ )\n /¯ /\n / /"
)
await f.edit(
". /¯ )\n /¯ /\n / /\n /´¯/' '/´¯¯`•¸"
)
await f.edit(
". /¯ )\n /¯ /\n / /\n /´¯/' '/´¯¯`•¸\n /'/ / / /¨¯\\ "
)
await f.edit(
". /¯ )\n /¯ /\n / /\n /´¯/' '/´¯¯`•¸\n /'/ / / /¨¯\\ \n ('( ( ( ( ¯~/' ')"
)
await f.edit(
". /¯ )\n /¯ /\n / /\n /´¯/' '/´¯¯`•¸\n /'/ / / /¨¯\\ \n ('( ( ( ( ¯~/' ')\n \\ /"
)
await f.edit(
". /¯ )\n /¯ /\n / /\n /´¯/' '/´¯¯`•¸\n /'/ / / /¨¯\\ \n ('( ( ( ( ¯~/' ')\n \\ /\n \\ _.•´"
)
await f.edit(
". /¯ )\n /¯ /\n / /\n /´¯/' '/´¯¯`•¸\n /'/ / / /¨¯\\ \n ('( ( ( ( ¯~/' ')\n \\ /\n \\ _.•´\n \\ ("
)
await f.edit(
". /¯ )\n /¯ /\n / /\n /´¯/' '/´¯¯`•¸\n /'/ / / /¨¯\\ \n ('( ( ( ( ¯~/' ')\n \\ /\n \\ _.•´\n \\ (\n \\ "
)
end = datetime.now()
duration = (end - start).microseconds / 1000
user = await bot.get_me()
await f.edit(
f"**PONG!!🏓**\n"
f"✣ **Pinger** - `%sms`\n"
f"✣ **Uptime -** `{uptime}` \n"
f"**✦҈͜͡Owner :** [{user.first_name}](tg://user?id={user.id})" % (duration)
)
@bot.on(man_cmd(outgoing=True, pattern=r"keping$"))
async def _(pong):
await get_readable_time((time.time() - StartTime))
start = datetime.now()
await pong.edit("**『⍟𝐊𝐎𝐍𝐓𝐎𝐋』**")
await pong.edit("**◆◈𝐊𝐀𝐌𝐏𝐀𝐍𝐆◈◆**")
await pong.edit("**𝐏𝐄𝐂𝐀𝐇𝐊𝐀𝐍 𝐁𝐈𝐉𝐈 𝐊𝐀𝐔 𝐀𝐒𝐔**")
await pong.edit("**☬𝐒𝐈𝐀𝐏 𝐊𝐀𝐌𝐏𝐀𝐍𝐆 𝐌𝐄𝐍𝐔𝐌𝐁𝐔𝐊 𝐀𝐒𝐔☬**")
end = datetime.now()
duration = (end - start).microseconds / 1000
user = await bot.get_me()
await pong.edit(
f"**✲ 𝙺𝙾𝙽𝚃𝙾𝙻 𝙼𝙴𝙻𝙴𝙳𝚄𝙶** "
f"\n ⫸ ᴷᵒⁿᵗᵒˡ `%sms` \n"
f"**✲ 𝙱𝙸𝙹𝙸 𝙿𝙴𝙻𝙴𝚁** "
f"\n ⫸ ᴷᵃᵐᵖᵃⁿᵍ『[{user.first_name}](tg://user?id={user.id})』 \n" % (duration)
)
# .keping & kping Coded by Koala
@bot.on(man_cmd(outgoing=True, pattern=r"kping$"))
async def _(pong):
uptime = await get_readable_time((time.time() - StartTime))
start = datetime.now()
await pong.edit("8✊===D")
await pong.edit("8=✊==D")
await pong.edit("8==✊=D")
await pong.edit("8===✊D")
await pong.edit("8==✊=D")
await pong.edit("8=✊==D")
await pong.edit("8✊===D")
await pong.edit("8=✊==D")
await pong.edit("8==✊=D")
await pong.edit("8===✊D")
await pong.edit("8==✊=D")
await pong.edit("8=✊==D")
await pong.edit("8✊===D")
await pong.edit("8=✊==D")
await pong.edit("8==✊=D")
await pong.edit("8===✊D")
await pong.edit("8===✊D💦")
await pong.edit("8====D💦💦")
await pong.edit("**CROOTTTT PINGGGG!**")
end = datetime.now()
duration = (end - start).microseconds / 1000
await pong.edit(
f"**NGENTOT!! 🐨**\n**KAMPANG** : %sms\n**Bot Uptime** : {uptime}🕛" % (duration)
)
@bot.on(man_cmd(outgoing=True, pattern=r"speedtest$"))
async def _(speed):
"""For .speedtest command, use SpeedTest to check server speeds."""
await speed.edit("`Running speed test...`")
test = Speedtest()
test.get_best_server()
test.download()
test.upload()
test.results.share()
result = test.results.dict()
msg = (
f"**Started at {result['timestamp']}**\n\n"
"**Client**\n"
f"**ISP :** `{result['client']['isp']}`\n"
f"**Country :** `{result['client']['country']}`\n\n"
"**Server**\n"
f"**Name :** `{result['server']['name']}`\n"
f"**Country :** `{result['server']['country']}`\n"
f"**Sponsor :** `{result['server']['sponsor']}`\n\n"
f"**Ping :** `{result['ping']}`\n"
f"**Upload :** `{humanbytes(result['upload'])}/s`\n"
f"**Download :** `{humanbytes(result['download'])}/s`"
)
await speed.delete()
await speed.client.send_file(
speed.chat_id,
result["share"],
caption=msg,
force_document=False,
)
@bot.on(man_cmd(outgoing=True, pattern=r"pong$"))
async def _(pong):
"""For .ping command, ping the userbot from any chat."""
start = datetime.now()
await pong.edit("`Sepong.....🏓`")
end = datetime.now()
duration = (end - start).microseconds / 9000
await pong.edit("🏓 **Ping!**\n`%sms`" % (duration))
# IF YOU FORK THIS, DO NOT DELETE THIS absen HANDLER 😡
@register(incoming=True, from_users=844432220, pattern=r"^.absen$")
async def risman(ganteng):
await ganteng.reply(random.choice(absen))
# DO NOT DELETE THIS 😡 JUST COPY IT AND ADD YOUR OWN
# DELETE IT AND I WILL GBAN YOU 🥴 I HAVE MARKED YOUR TELEGRAM ACCOUNT 😡
CMD_HELP.update(
{
"ping": f"**Plugin : **`ping`\
\n\n • **Syntax :** `{cmd}ping` ; `{cmd}lping` ; `{cmd}xping` ; `{cmd}kping` ; `{cmd}fping`\
\n • **Function : **Untuk menunjukkan ping userbot.\
\n\n • **Syntax :** `{cmd}pong`\
\n • **Function : **Sama seperti perintah ping\
"
}
)
CMD_HELP.update(
{
"speedtest": f"**Plugin : **`speedtest`\
\n\n • **Syntax :** `{cmd}speedtest`\
\n • **Function : **Untuk Mengetes kecepatan server userbot.\
"
}
)
| |
spawner.rs | use super::{
random_table::MasterTable, raws::*, Attribute, AttributeBonus, Attributes, Duration,
EntryTrigger, EquipmentChanged, Faction, HungerClock, HungerState, Initiative, KnownSpells,
LightSource, Map, MasterDungeonMap, Name, OtherLevelPosition, Player, Pool, Pools, Position,
Rect, Renderable, SerializeMe, SingleActivation, Skill, Skills, StatusEffect, TeleportTo,
TileType, Viewshed,
};
use crate::{attr_bonus, mana_at_level, player_hp_at_level};
use rltk::RGB;
use specs::prelude::*;
use specs::saveload::{MarkedBuilder, SimpleMarker};
use std::collections::HashMap;
/// Spawns the player and returns his/her entity object.
pub fn player(ecs: &mut World, player_x: i32, player_y: i32) -> Entity {
spawn_all_spells(ecs);
let mut skills = Skills {
skills: HashMap::new(),
};
skills.skills.insert(Skill::Melee, 1);
skills.skills.insert(Skill::Defense, 1);
skills.skills.insert(Skill::Magic, 1);
let player = ecs
.create_entity()
.with(Position {
x: player_x,
y: player_y,
})
.with(Renderable {
glyph: rltk::to_cp437('@'),
fg: RGB::named(rltk::YELLOW),
bg: RGB::named(rltk::BLACK),
render_order: 0,
})
.with(Player {})
.with(Viewshed {
visible_tiles: Vec::new(),
range: 8,
dirty: true,
})
.with(Name {
name: "Player".to_string(),
})
.with(HungerClock {
state: HungerState::WellFed,
duration: 20,
})
.with(Attributes {
might: Attribute {
base: 11,
modifiers: 0,
bonus: attr_bonus(11),
},
fitness: Attribute {
base: 11,
modifiers: 0,
bonus: attr_bonus(11),
},
quickness: Attribute {
base: 11,
modifiers: 0,
bonus: attr_bonus(11),
},
intelligence: Attribute {
base: 11,
modifiers: 0,
bonus: attr_bonus(11),
},
})
.with(skills)
.with(Pools {
hit_points: Pool {
current: player_hp_at_level(11, 1),
max: player_hp_at_level(11, 1),
},
mana: Pool {
current: mana_at_level(11, 1),
max: mana_at_level(11, 1),
},
xp: 0,
level: 1,
total_weight: 0.0,
total_initiative_penalty: 0.0,
gold: 0.0,
god_mode: false,
})
.with(EquipmentChanged {})
.with(LightSource {
color: rltk::RGB::from_f32(1.0, 1.0, 0.5),
range: 8,
})
.with(Initiative { current: 0 })
.with(Faction {
name: "Player".to_string(),
})
.with(KnownSpells { spells: Vec::new() })
.marked::<SimpleMarker<SerializeMe>>()
.build();
// Starting equipment
spawn_named_entity(
&RAWS.lock().unwrap(),
ecs,
"Rusty Longsword",
SpawnType::Equipped { by: player },
);
spawn_named_entity(
&RAWS.lock().unwrap(),
ecs,
"Dried Sausage",
SpawnType::Carried { by: player },
);
spawn_named_entity(
&RAWS.lock().unwrap(),
ecs,
"Beer",
SpawnType::Carried { by: player },
);
spawn_named_entity(
&RAWS.lock().unwrap(),
ecs,
"Stained Tunic",
SpawnType::Equipped { by: player },
);
spawn_named_entity(
&RAWS.lock().unwrap(),
ecs,
"Torn Trousers",
SpawnType::Equipped { by: player },
);
spawn_named_entity(
&RAWS.lock().unwrap(),
ecs,
"Old Boots",
SpawnType::Equipped { by: player },
);
spawn_named_entity(
&RAWS.lock().unwrap(),
ecs,
"Shortbow",
SpawnType::Carried { by: player },
);
// Starting hangover
ecs.create_entity()
.with(StatusEffect { target: player })
.with(Duration { turns: 10 })
.with(Name {
name: "Hangover".to_string(),
})
.with(AttributeBonus {
might: Some(-1),
fitness: None,
quickness: Some(-1),
intelligence: Some(-1),
})
.marked::<SimpleMarker<SerializeMe>>()
.build();
player
}
const MAX_MONSTERS: i32 = 4;
fn room_table(map_depth: i32) -> MasterTable {
get_spawn_table_for_depth(&RAWS.lock().unwrap(), map_depth)
}
/// Fills a room with stuff!
pub fn | (map: &Map, room: &Rect, map_depth: i32, spawn_list: &mut Vec<(usize, String)>) {
let mut possible_targets: Vec<usize> = Vec::new();
{
// Borrow scope - to keep access to the map separated
for y in room.y1 + 1..room.y2 {
for x in room.x1 + 1..room.x2 {
let idx = map.xy_idx(x, y);
if map.tiles[idx] == TileType::Floor {
possible_targets.push(idx);
}
}
}
}
spawn_region(map, &possible_targets, map_depth, spawn_list);
}
/// Fills a region with stuff!
pub fn spawn_region(
_map: &Map,
area: &[usize],
map_depth: i32,
spawn_list: &mut Vec<(usize, String)>,
) {
let spawn_table = room_table(map_depth);
let mut spawn_points: HashMap<usize, String> = HashMap::new();
let mut areas: Vec<usize> = Vec::from(area);
// Scope to keep the borrow checker happy
{
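        // Spawn count: 1d(MAX_MONSTERS + 3) plus a depth bonus, minus a flat 3, capped by the free tiles available.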
let num_spawns = i32::min(
areas.len() as i32,
crate::rng::roll_dice(1, MAX_MONSTERS + 3) + (map_depth - 1) - 3,
);
if num_spawns == 0 {
return;
}
for _i in 0..num_spawns {
let array_index = if areas.len() == 1 {
0usize
} else {
(crate::rng::roll_dice(1, areas.len() as i32) - 1) as usize
};
let map_idx = areas[array_index];
spawn_points.insert(map_idx, spawn_table.roll());
areas.remove(array_index);
}
}
// Actually spawn the monsters
for spawn in spawn_points.iter() {
spawn_list.push((*spawn.0, spawn.1.to_string()));
}
}
/// Spawns a named entity (name in tuple.1) at the location in (tuple.0)
pub fn spawn_entity(ecs: &mut World, spawn: &(&usize, &String)) {
let map = ecs.fetch::<Map>();
let width = map.width as usize;
let x = (*spawn.0 % width) as i32;
let y = (*spawn.0 / width) as i32;
std::mem::drop(map);
let spawn_result = spawn_named_entity(
&RAWS.lock().unwrap(),
ecs,
&spawn.1,
SpawnType::AtPosition { x, y },
);
if spawn_result.is_some() {
return;
}
if spawn.1 != "None" {
rltk::console::log(format!(
"WARNING: We don't know how to spawn [{}]!",
spawn.1
));
}
}
pub fn spawn_town_portal(ecs: &mut World) {
// Get current position & depth
let map = ecs.fetch::<Map>();
let player_depth = map.depth;
let player_pos = ecs.fetch::<rltk::Point>();
let player_x = player_pos.x;
let player_y = player_pos.y;
std::mem::drop(player_pos);
std::mem::drop(map);
// Find part of the town for the portal
let dm = ecs.fetch::<MasterDungeonMap>();
let town_map = dm.get_map(1).unwrap();
let mut stairs_idx = 0;
for (idx, tt) in town_map.tiles.iter().enumerate() {
if *tt == TileType::DownStairs {
stairs_idx = idx;
}
}
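    // Place the portal two tiles west of the town's down staircase.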
let portal_x = (stairs_idx as i32 % town_map.width) - 2;
let portal_y = stairs_idx as i32 / town_map.width;
std::mem::drop(dm);
// Spawn the portal itself
ecs.create_entity()
.with(OtherLevelPosition {
x: portal_x,
y: portal_y,
depth: 1,
})
.with(Renderable {
glyph: rltk::to_cp437('♥'),
fg: RGB::named(rltk::CYAN),
bg: RGB::named(rltk::BLACK),
render_order: 0,
})
.with(EntryTrigger {})
.with(TeleportTo {
x: player_x,
y: player_y,
depth: player_depth,
player_only: true,
})
.with(SingleActivation {})
.with(Name {
name: "Town Portal".to_string(),
})
.build();
}
| spawn_room |
parses-game.js | import EmberObject from '@ember/object'; |
export default Mixin.create({
iconMap() {
return {
single: 'fa-user',
multi: 'fa-users',
}
},
normalize(game, hidden, starred) {
// "players": "Single-player" || players": "Multi-player",
game.playersIcon = (game.players || []).map((player) => this.iconMap()[player]);
game.starred = starred.indexOf(game.appid) >= 0;
game.hidden = hidden.indexOf(game.appid) >= 0;
game.index = Math.round(Math.random() * 10000);
game.url = `https://store.steampowered.com/app/${game.appid}`;
return EmberObject.create(game);
},
}); | import Mixin from '@ember/object/mixin'; |
mysql_provider.go | package dbmigrate
import (
"fmt"
// mysql driver, imported only to exec init function
_ "github.com/go-sql-driver/mysql"
)
func | () {
providers["mysql"] = &mysqlProvider{}
}
// mysqlProvider is the provider interface implementation for mysql
type mysqlProvider struct {
defaultProvider
}
func (p *mysqlProvider) driver() string {
return "mysql"
}
func (p *mysqlProvider) dsn(settings *Settings) (string, error) {
if settings.Database == "" {
return "", errDBNameNotProvided
}
if settings.User == "" {
return "", errUserNotProvided
}
host := settings.Host
if host == "" {
host = "127.0.0.1"
}
port := settings.Port
if port == 0 {
port = 3306
}
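	// parseTime=true makes the driver scan DATE/DATETIME columns into time.Time values.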
return fmt.Sprintf("%s:%s@tcp(%s:%d)/%s?parseTime=true", settings.User, settings.Password, host, port, settings.Database), nil
}
| init |
util.go | package internal
import (
"fmt"
"time"
)
// TimeSince returns the time elapsed since the given start time, rounded down to the nearest second.
func TimeSince(start time.Time) time.Duration {
return time.Since(start) / time.Second * time.Second
}
// MakeTestRepoName returns the given repo name as a fully qualified repository name in the
// sourcegraph-testing GitHub organization.
func MakeTestRepoName(repoName string) string | {
return fmt.Sprintf("github.com/%s/%s", "sourcegraph-testing", repoName)
} |
|
picnic.py | #!/usr/bin/env python3
"""
Author : mahmoudabdelrahman <mahmoudabdelrahman@localhost>
Date : 2022-01-28
Purpose: Rock the Casbah
"""
import argparse
# --------------------------------------------------
def get_args():
"""Get command-line arguments"""
parser = argparse.ArgumentParser(
description='Rock the Casbah',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('item',
metavar='str',
nargs='+',
help='Item(s) to bring')
parser.add_argument('-s',
'--sorted',
action='store_true',
help='Sort the items')
return parser.parse_args()
# --------------------------------------------------
def main():
"""Make a jazz noise here"""
args = get_args()
items = args.item
    length = len(items)
if args.sorted:
items.sort()
list_of_items = ''
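    # Serial-comma join: "a" / "a and b" / "a, b, and c".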
if length == 1:
list_of_items = items[0]
elif length == 2:
list_of_items = ' and '.join(items)
else:
items[-1] = 'and ' + items[-1]
list_of_items = ', '.join(items)
print(f'You are bringing {list_of_items}.')
# --------------------------------------------------
if __name__ == '__main__':
| main() |
|
__init__.py | from string import Template
from dominate.tags import script, link, style
from dominate.util import raw
import json
REQUIRED = [
script(
src="https://unpkg.com/[email protected]/dist/axios.min.js", crossorigin="anonymous"
),
script(
src="https://code.jquery.com/jquery-3.3.1.slim.min.js",
integrity="sha384-q8i/X+965DzO0rT7abK41JStQIAqVgRVzpbzo5smXKp4YfRvH+8abtTE1Pi6jizo",
crossorigin="anonymous",
),
]
BOOTSTRAP = [
link(
rel="stylesheet",
href="https://stackpath.bootstrapcdn.com/bootstrap/4.3.1/css/bootstrap.min.css",
integrity="sha384-ggOyR0iXCbMQv3Xipma34MD+dH/1fQ784/j6cY/iJTQUOhcWr7x9JvoRxT2MZw1T",
crossorigin="anonymous",
),
script(
src="https://cdnjs.cloudflare.com/ajax/libs/popper.js/1.14.7/umd/popper.min.js",
integrity="sha384-UO2eT0CpHqdSJQ6hJty5KVphtPhzWj9WO1clHTMGa3JDZwrnQq4sF86dIHNDz0W1",
crossorigin="anonymous",
),
script(
src="https://stackpath.bootstrapcdn.com/bootstrap/4.3.1/js/bootstrap.min.js",
integrity="sha384-JjSmVgyd0p3pXB1rRibZUAYoIIy6OrQ6VrjIEaFf/nJGzIxFDsf4x0xIM+B07jRM",
crossorigin="anonymous",
),
]
CHARTJS = script(
src="https://cdnjs.cloudflare.com/ajax/libs/Chart.js/2.8.0/Chart.bundle.min.js"
)
def render(x):
if isinstance(x, list):
return "".join(e.render(pretty=False) for e in x)
return x.render(pretty=False)
class CustomTemplate(Template):
delimiter = "$$"
class JavaScript:
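    # A declarative JS snippet: subclasses define js_source; instantiating the class returns the rendered script (see __new__), not an instance.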
defaults = None
js_source = ""
def render(self, values, with_script_tag=True):
template = CustomTemplate(self.js_source)
rendered = raw(template.substitute(values).strip())
if with_script_tag:
return script(rendered, type="text/javascript")
else:
return rendered
    def __new__(cls, with_script_tag=True, **kwargs):
        # Copy the defaults so per-call kwargs never mutate the shared class attribute.
        values = dict(cls.defaults or {})
        values.update(kwargs)
        inst = super(JavaScript, cls).__new__(cls)
        return inst.render(values, with_script_tag)
class | (JavaScript):
js_source = """
function ReplaceOuterHtml(url, id, params){
axios.get(url, {params: params === undefined ? {} : params})
.then(function (response) {
document.getElementById(id).outerHTML = response.data;
});
};
"""
def replaceOuterHtml(url, id, **kwargs):
params = json.dumps(kwargs)
return f"ReplaceOuterHtml('{url}', '{id}', {params})"
class CreateReplaceInnerHtmlFunc(JavaScript):
js_source = """
function ReplaceInnerHtml(url, id, params){
axios.get(url, {params: params === undefined ? {} : params})
.then(function (response) {
document.getElementById(id).innerHTML = response.data;
});
};
"""
def replaceInnerHtml(url, id, **kwargs):
params = json.dumps(kwargs)
return f"ReplaceInnerHtml('{url}', '{id}', {params})"
# backwards compatibility
replaceHtml = replaceInnerHtml
class CreateAppendHtmlFunc(JavaScript):
js_source = """
function AppendHtml(url, id, params){
axios.get(url, {params: params === undefined ? {} : params})
.then( function (response) {
$("#"+id).append(response.data);
});
};
"""
def appendHtml(url, id, **kwargs):
params = json.dumps(kwargs)
return f"AppendHtml('{url}', '{id}', {params})"
class CreatePrependHtmlFunc(JavaScript):
js_source = """
function PrependHtml(url, id, params){
axios.get(url, {params: params === undefined ? {} : params})
.then( function(response) {
$("#"+id).prepend(response.data);
});
};
"""
def prependHtml(url, id, **kwargs):
params = json.dumps(kwargs)
return f"PrependHtml('{url}', '{id}', {params})"
class CreateRemoveHtmlFunc(JavaScript):
js_source = """
function RemoveHtml(id){
$("#"+id).remove();
};
"""
class CreateSetAttributeFunction(JavaScript):
js_source = """
function SetAttribute(id, attribute, value){
$("#"+id).attr(attribute, value)
};
"""
def removeHtml(id):
return f"RemoveHtml('{id}')"
def chain_functions(*function_strings):
return "; ".join(function_strings) + ";"
def style_tag_with_css(css):
return style(raw(css))
class LineChart(JavaScript):
js_source = """
const canvas$$id = $('#$$id')
const lineChart$$id = new Chart(canvas$$id, {
type: "line",
data: {
labels: $$xlabels,
datasets: $$datasets
},
options: $$options
});
"""
def line_chart(id, xlabels, datasets, options):
return LineChart(id=id, xlabels=json.dumps(xlabels), datasets=json.dumps(datasets), options=json.dumps(options or {}))
| CreateReplaceOuterHtmlFunc |
mmap_unix.go | // Copyright 2017 tantexian
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// +build darwin dragonfly freebsd linux openbsd solaris netbsd
package mmap
import (
"syscall"
)
func mmap(len int, inprot, inflags, fd uintptr, off int64) ([]byte, error) {
flags := syscall.MAP_SHARED
prot := syscall.PROT_READ
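	// COPY maps privately (copy-on-write), RDWR maps shared; both add PROT_WRITE, and EXEC adds PROT_EXEC.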
switch {
	case inprot&COPY != 0:
prot |= syscall.PROT_WRITE
flags = syscall.MAP_PRIVATE
case inprot&RDWR != 0:
prot |= syscall.PROT_WRITE
}
if inprot&EXEC != 0 {
prot |= syscall.PROT_EXEC
}
if inflags&ANON != 0 {
flags |= syscall.MAP_ANON
}
b, err := syscall.Mmap(int(fd), off, len, prot, flags)
if err != nil {
return nil, err
}
return b, nil
}
func flush(addr, len uintptr) error {
_, _, errno := syscall.Syscall(_SYS_MSYNC, addr, len, _MS_SYNC)
if errno != 0 {
return syscall.Errno(errno)
}
return nil
}
func lock(addr, len uintptr) error {
_, _, errno := syscall.Syscall(syscall.SYS_MLOCK, addr, len, 0)
if errno != 0 {
return syscall.Errno(errno)
}
return nil
}
func unlock(addr, len uintptr) error {
_, _, errno := syscall.Syscall(syscall.SYS_MUNLOCK, addr, len, 0)
if errno != 0 {
return syscall.Errno(errno)
} | }
func unmap(addr, len uintptr) error {
_, _, errno := syscall.Syscall(syscall.SYS_MUNMAP, addr, len, 0)
if errno != 0 {
return syscall.Errno(errno)
}
return nil
} | return nil |
sys_validate.rs | //! # System Validation Checks
//! This module contains all the checks we run for sys validation
use super::queue_consumer::TriggerSender;
use super::ribosome::RibosomeT;
use super::workflow::incoming_dht_ops_workflow::incoming_dht_ops_workflow;
use super::workflow::sys_validation_workflow::SysValidationWorkspace;
use crate::conductor::entry_def_store::get_entry_def;
use crate::conductor::handle::ConductorHandleT;
use crate::conductor::space::Space;
use holochain_keystore::AgentPubKeyExt;
use holochain_p2p::HolochainP2pDna;
use holochain_types::prelude::*;
use holochain_zome_types::countersigning::CounterSigningSessionData;
use std::convert::TryInto;
use std::sync::Arc;
pub(super) use error::*;
pub use holo_hash::*;
pub use holochain_state::source_chain::SourceChainError;
pub use holochain_state::source_chain::SourceChainResult;
pub use holochain_zome_types::HeaderHashed;
pub use holochain_zome_types::Timestamp;
#[allow(missing_docs)]
mod error;
#[cfg(test)]
mod tests;
/// 16mb limit on Entries due to websocket limits.
/// Consider splitting large entries up.
pub const MAX_ENTRY_SIZE: usize = 16_000_000;
/// 1kb limit on LinkTags.
/// Tags are used as keys to the database to allow
/// fast lookup so they should be small.
pub const MAX_TAG_SIZE: usize = 1000;
/// Verify the signature for this header
pub async fn verify_header_signature(sig: &Signature, header: &Header) -> SysValidationResult<()> {
if header.author().verify_signature(sig, header).await {
Ok(())
} else {
Err(SysValidationError::ValidationOutcome(
ValidationOutcome::Counterfeit((*sig).clone(), (*header).clone()),
))
}
}
/// Verify the author key was valid at the time
/// of signing with dpki
/// TODO: This is just a stub until we have dpki.
pub async fn author_key_is_valid(_author: &AgentPubKey) -> SysValidationResult<()> {
Ok(())
}
/// Verify the countersigning session contains the specified header.
pub fn check_countersigning_session_data_contains_header(
entry_hash: EntryHash,
session_data: &CounterSigningSessionData,
header: NewEntryHeaderRef<'_>,
) -> SysValidationResult<()> {
let header_is_in_session = session_data
.build_header_set(entry_hash)
.map_err(SysValidationError::from)?
.iter()
.any(|session_header| match (&header, session_header) {
(NewEntryHeaderRef::Create(create), Header::Create(session_create)) => {
create == &session_create
}
(NewEntryHeaderRef::Update(update), Header::Update(session_update)) => {
update == &session_update
}
_ => false,
});
if !header_is_in_session {
Err(SysValidationError::ValidationOutcome(
ValidationOutcome::HeaderNotInCounterSigningSession(
session_data.to_owned(),
header.to_new_entry_header(),
),
))
} else {
Ok(())
}
}
/// Verify that the signature on a preflight request is valid.
pub async fn check_countersigning_preflight_response_signature(
preflight_response: &PreflightResponse,
) -> SysValidationResult<()> {
let signature_is_valid = preflight_response
.request()
.signing_agents()
.get(*preflight_response.agent_state().agent_index() as usize)
.ok_or_else(|| {
SysValidationError::ValidationOutcome(ValidationOutcome::PreflightResponseSignature(
(*preflight_response).clone(),
))
})?
.0
.verify_signature_raw(
preflight_response.signature(),
preflight_response
.encode_for_signature()
.map_err(|_| {
SysValidationError::ValidationOutcome(
ValidationOutcome::PreflightResponseSignature(
(*preflight_response).clone(),
),
)
})?
.into(),
)
.await;
if signature_is_valid {
Ok(())
} else {
Err(SysValidationError::ValidationOutcome(
ValidationOutcome::PreflightResponseSignature((*preflight_response).clone()),
))
}
}
/// Verify all the countersigning session data together.
pub async fn check_countersigning_session_data(
entry_hash: EntryHash,
session_data: &CounterSigningSessionData,
header: NewEntryHeaderRef<'_>,
) -> SysValidationResult<()> {
session_data.check_integrity()?;
check_countersigning_session_data_contains_header(entry_hash, session_data, header)?;
let tasks: Vec<_> = session_data
.responses()
.iter()
.map(|(response, signature)| async move {
let preflight_response = PreflightResponse::try_new(
session_data.preflight_request().clone(),
response.clone(),
signature.clone(),
)?;
check_countersigning_preflight_response_signature(&preflight_response).await
})
.collect();
let results: Vec<SysValidationResult<()>> = futures::future::join_all(tasks).await;
let results: SysValidationResult<()> = results.into_iter().collect();
match results {
Ok(_) => Ok(()),
Err(e) => Err(e),
}
}
/// Check that previous header makes sense
/// for this header.
/// If not Dna then cannot be root of chain
/// and must have previous header
pub fn check_prev_header(header: &Header) -> SysValidationResult<()> {
match &header {
Header::Dna(_) => Ok(()),
_ => {
if header.header_seq() > 0 {
header
.prev_header()
.ok_or(PrevHeaderError::MissingPrev)
.map_err(ValidationOutcome::from)?;
Ok(())
} else {
Err(PrevHeaderError::InvalidRoot).map_err(|e| ValidationOutcome::from(e).into())
}
}
}
}
/// Check that Dna headers are only added to empty source chains
pub async fn check_valid_if_dna(
header: &Header,
workspace: &SysValidationWorkspace,
) -> SysValidationResult<()> {
match header {
Header::Dna(_) => {
if !workspace.is_chain_empty(header.author()).await? {
Err(PrevHeaderError::InvalidRoot).map_err(|e| ValidationOutcome::from(e).into())
} else if header.timestamp() < workspace.dna_def().origin_time {
                // Only the Dna header needs this check: if its timestamp is at
                // or after the origin time, every later header is too,
                // inductively, thanks to the prev_timestamp check.
Err(PrevHeaderError::InvalidRootOriginTime)
.map_err(|e| ValidationOutcome::from(e).into())
} else {
Ok(())
}
}
_ => Ok(()),
}
}
/// Check if there are other headers at this
/// sequence number
pub async fn check_chain_rollback(
header: &Header,
workspace: &SysValidationWorkspace,
) -> SysValidationResult<()> {
let empty = workspace.header_seq_is_empty(header).await?;
    // Ok, or log the suspected rollback and continue
if empty {
Ok(())
} else {
// TODO: implement real rollback detection once we know what that looks like
tracing::error!(
"Chain rollback detected at position {} for agent {:?} from header {:?}",
header.header_seq(),
header.author(),
header,
);
Ok(())
}
}
/// Placeholder for future spam check.
/// Check header timestamps don't exceed MAX_PUBLISH_FREQUENCY
pub async fn check_spam(_header: &Header) -> SysValidationResult<()> {
Ok(())
}
/// Check previous header timestamp is before this header
pub fn check_prev_timestamp(header: &Header, prev_header: &Header) -> SysValidationResult<()> {
if header.timestamp() > prev_header.timestamp() {
Ok(())
} else {
Err(PrevHeaderError::Timestamp).map_err(|e| ValidationOutcome::from(e).into())
}
}
/// Check the previous header is one less than the current
pub fn check_prev_seq(header: &Header, prev_header: &Header) -> SysValidationResult<()> {
let header_seq = header.header_seq();
let prev_seq = prev_header.header_seq();
if header_seq > 0 && prev_seq == header_seq - 1 {
Ok(())
} else {
Err(PrevHeaderError::InvalidSeq(header_seq, prev_seq))
.map_err(|e| ValidationOutcome::from(e).into())
}
}
/// Check the entry variant matches the variant in the headers entry type
pub fn check_entry_type(entry_type: &EntryType, entry: &Entry) -> SysValidationResult<()> {
match (entry_type, entry) {
(EntryType::AgentPubKey, Entry::Agent(_)) => Ok(()),
(EntryType::App(_), Entry::App(_)) => Ok(()),
(EntryType::App(_), Entry::CounterSign(_, _)) => Ok(()),
(EntryType::CapClaim, Entry::CapClaim(_)) => Ok(()),
(EntryType::CapGrant, Entry::CapGrant(_)) => Ok(()),
_ => Err(ValidationOutcome::EntryType.into()),
}
}
/// Check the AppEntryType is valid for the zome.
/// Check the EntryDefId and ZomeId are in range.
pub async fn check_app_entry_type(
dna_hash: &DnaHash,
entry_type: &AppEntryType,
conductor: &dyn ConductorHandleT,
) -> SysValidationResult<EntryDef> {
// We want to be careful about holding locks open to the conductor api
// so calls are made in blocks
let ribosome = conductor
.get_ribosome(dna_hash)
.map_err(|_| SysValidationError::DnaMissing(dna_hash.clone()))?;
// Check if the zome is found
let zome = ribosome
.find_zome_from_entry(&entry_type.id())
.ok_or_else(|| ValidationOutcome::ZomeId(entry_type.clone()))?
.into_inner()
.1;
let entry_def = get_entry_def(entry_type.id(), zome, dna_hash, conductor).await?;
// Check the visibility and return
match entry_def {
Some(entry_def) => {
if entry_def.visibility == *entry_type.visibility() {
Ok(entry_def)
} else {
Err(ValidationOutcome::EntryVisibility(entry_type.clone()).into())
}
}
None => Err(ValidationOutcome::EntryDefId(entry_type.clone()).into()),
}
}
/// Check the app entry type isn't private for store entry
pub fn check_not_private(entry_def: &EntryDef) -> SysValidationResult<()> {
match entry_def.visibility {
EntryVisibility::Public => Ok(()),
EntryVisibility::Private => Err(ValidationOutcome::PrivateEntry.into()),
}
}
/// Check the headers entry hash matches the hash of the entry
pub async fn check_entry_hash(hash: &EntryHash, entry: &Entry) -> SysValidationResult<()> {
if *hash == EntryHash::with_data_sync(entry) {
Ok(())
} else {
Err(ValidationOutcome::EntryHash.into())
}
}
/// Check the header should have an entry.
/// Is either a Create or Update
pub fn check_new_entry_header(header: &Header) -> SysValidationResult<()> {
match header {
Header::Create(_) | Header::Update(_) => Ok(()),
_ => Err(ValidationOutcome::NotNewEntry(header.clone()).into()),
}
}
/// Check the entry size is under the MAX_ENTRY_SIZE
pub fn check_entry_size(entry: &Entry) -> SysValidationResult<()> {
match entry {
Entry::App(bytes) => {
let size = std::mem::size_of_val(&bytes.bytes()[..]);
if size < MAX_ENTRY_SIZE {
Ok(())
} else {
Err(ValidationOutcome::EntryTooLarge(size, MAX_ENTRY_SIZE).into())
}
}
// Other entry types are small
_ => Ok(()),
}
}
/// Check the link tag size is under the MAX_TAG_SIZE
pub fn check_tag_size(tag: &LinkTag) -> SysValidationResult<()> {
let size = std::mem::size_of_val(&tag.0[..]);
if size < MAX_TAG_SIZE {
Ok(())
} else {
Err(ValidationOutcome::TagTooLarge(size, MAX_TAG_SIZE).into())
}
}
/// Check a Update's entry type is the same for
/// original and new entry.
pub fn check_update_reference(
eu: &Update,
original_entry_header: &NewEntryHeaderRef<'_>,
) -> SysValidationResult<()> {
if eu.entry_type == *original_entry_header.entry_type() {
Ok(())
} else {
Err(ValidationOutcome::UpdateTypeMismatch(
eu.entry_type.clone(),
original_entry_header.entry_type().clone(),
)
.into())
}
}
/// Validate a chain of headers with an optional starting point.
pub fn validate_chain<'iter>(
mut headers: impl Iterator<Item = &'iter HeaderHashed>,
persisted_chain_head: &Option<(HeaderHash, u32)>,
) -> SysValidationResult<()> {
// Check the chain starts in a valid way.
let mut last_item = match headers.next() {
Some(HeaderHashed {
hash,
content: header,
}) => {
match persisted_chain_head {
Some((prev_hash, prev_seq)) => {
check_prev_header_chain(prev_hash, *prev_seq, header)
.map_err(ValidationOutcome::from)?;
}
None => {
// If there's no persisted chain head, then the first header
// must be a DNA.
if !matches!(header, Header::Dna(_)) {
return Err(ValidationOutcome::from(PrevHeaderError::InvalidRoot).into());
}
}
}
let seq = header.header_seq();
(hash, seq)
}
None => return Ok(()),
};
for HeaderHashed {
hash,
content: header,
} in headers
{
// Check each item of the chain is valid.
check_prev_header_chain(last_item.0, last_item.1, header)
.map_err(ValidationOutcome::from)?;
last_item = (hash, header.header_seq());
}
Ok(())
}
// Check the header is valid for the previous header.
fn check_prev_header_chain(
prev_header_hash: &HeaderHash,
prev_header_seq: u32,
header: &Header,
) -> Result<(), PrevHeaderError> {
// DNA cannot appear later in the chain.
if matches!(header, Header::Dna(_)) {
Err(PrevHeaderError::InvalidRoot)
} else if header.prev_header().map_or(true, |p| p != prev_header_hash) {
// Check the prev hash matches.
Err(PrevHeaderError::HashMismatch)
} else if header
.header_seq()
.checked_sub(1)
.map_or(true, |s| prev_header_seq != s)
{
// Check the prev seq is one less.
Err(PrevHeaderError::InvalidSeq(
header.header_seq(),
prev_header_seq,
))
} else {
Ok(())
}
}
/// If we are not holding this header then
/// retrieve it and send it as a RegisterAddLink DhtOp
/// to our incoming_dht_ops_workflow.
///
/// Apply a checks callback to the Element.
///
/// Additionally sys validation will be triggered to
/// run again if we weren't holding it.
pub async fn check_and_hold_register_add_link<F>(
hash: &HeaderHash,
workspace: &SysValidationWorkspace,
network: HolochainP2pDna,
incoming_dht_ops_sender: Option<IncomingDhtOpSender>,
f: F,
) -> SysValidationResult<()>
where
F: FnOnce(&Element) -> SysValidationResult<()>,
{
let source = check_and_hold(hash, workspace, network).await?;
f(source.as_ref())?;
if let (Some(incoming_dht_ops_sender), Source::Network(element)) =
(incoming_dht_ops_sender, source)
{
incoming_dht_ops_sender
.send_register_add_link(element)
.await?;
}
Ok(())
}
/// If we are not holding this header then
/// retrieve it and send it as a RegisterAgentActivity DhtOp
/// to our incoming_dht_ops_workflow.
///
/// Apply a checks callback to the Element.
///
/// Additionally sys validation will be triggered to
/// run again if we weren't holding it.
pub async fn check_and_hold_register_agent_activity<F>(
hash: &HeaderHash,
workspace: &SysValidationWorkspace,
network: HolochainP2pDna,
incoming_dht_ops_sender: Option<IncomingDhtOpSender>,
f: F,
) -> SysValidationResult<()>
where
F: FnOnce(&Element) -> SysValidationResult<()>,
{
let source = check_and_hold(hash, workspace, network).await?;
f(source.as_ref())?;
if let (Some(incoming_dht_ops_sender), Source::Network(element)) =
(incoming_dht_ops_sender, source)
{
incoming_dht_ops_sender
.send_register_agent_activity(element)
.await?;
}
Ok(())
}
/// If we are not holding this header then
/// retrieve it and send it as a StoreEntry DhtOp
/// to our incoming_dht_ops_workflow.
///
/// Apply a checks callback to the Element.
///
/// Additionally sys validation will be triggered to
/// run again if we weren't holding it.
pub async fn check_and_hold_store_entry<F>(
hash: &HeaderHash,
workspace: &SysValidationWorkspace,
network: HolochainP2pDna,
incoming_dht_ops_sender: Option<IncomingDhtOpSender>,
f: F,
) -> SysValidationResult<()>
where
F: FnOnce(&Element) -> SysValidationResult<()>,
{
let source = check_and_hold(hash, workspace, network).await?;
f(source.as_ref())?;
if let (Some(incoming_dht_ops_sender), Source::Network(element)) =
(incoming_dht_ops_sender, source)
{
incoming_dht_ops_sender.send_store_entry(element).await?;
}
Ok(())
}
/// If we are not holding this entry then
/// retrieve any element at this EntryHash
/// and send it as a StoreEntry DhtOp
/// to our incoming_dht_ops_workflow.
///
/// Note this is different to check_and_hold_store_entry
/// because it gets the Element via an EntryHash which
/// means it will be any Element.
///
/// Apply a checks callback to the Element.
///
/// Additionally sys validation will be triggered to
/// run again if we weren't holding it.
pub async fn check_and_hold_any_store_entry<F>(
hash: &EntryHash,
workspace: &SysValidationWorkspace,
network: HolochainP2pDna,
incoming_dht_ops_sender: Option<IncomingDhtOpSender>,
f: F,
) -> SysValidationResult<()>
where
F: FnOnce(&Element) -> SysValidationResult<()>,
{
let source = check_and_hold(hash, workspace, network).await?;
f(source.as_ref())?;
if let (Some(incoming_dht_ops_sender), Source::Network(element)) =
(incoming_dht_ops_sender, source)
{
incoming_dht_ops_sender.send_store_entry(element).await?;
}
Ok(())
}
/// If we are not holding this header then
/// retrieve it and send it as a StoreElement DhtOp
/// to our incoming_dht_ops_workflow.
///
/// Apply a checks callback to the Element.
///
/// Additionally sys validation will be triggered to
/// run again if we weren't holding it.
pub async fn check_and_hold_store_element<F>(
hash: &HeaderHash,
workspace: &SysValidationWorkspace,
network: HolochainP2pDna,
incoming_dht_ops_sender: Option<IncomingDhtOpSender>,
f: F,
) -> SysValidationResult<()>
where
F: FnOnce(&Element) -> SysValidationResult<()>,
{
let source = check_and_hold(hash, workspace, network).await?;
f(source.as_ref())?;
if let (Some(incoming_dht_ops_sender), Source::Network(element)) =
(incoming_dht_ops_sender, source)
{
incoming_dht_ops_sender.send_store_element(element).await?;
}
Ok(())
}
/// Allows you to send an op to the
/// incoming_dht_ops_workflow if you
/// found it on the network and were supposed
/// to be holding it.
#[derive(derive_more::Constructor, Clone)]
pub struct IncomingDhtOpSender {
space: Arc<Space>,
sys_validation_trigger: TriggerSender,
}
impl IncomingDhtOpSender {
/// Sends the op to the incoming workflow
async fn send_op(
self,
element: Element,
make_op: fn(Element) -> Option<(DhtOpHash, DhtOp)>,
) -> SysValidationResult<()> {
if let Some(op) = make_op(element) {
let ops = vec![op];
incoming_dht_ops_workflow(self.space.as_ref(), self.sys_validation_trigger, ops, false)
.await
.map_err(Box::new)?;
}
Ok(())
}
async fn send_store_element(self, element: Element) -> SysValidationResult<()> {
self.send_op(element, make_store_element).await
}
async fn send_store_entry(self, element: Element) -> SysValidationResult<()> {
let is_public_entry = element.header().entry_type().map_or(false, |et| {
matches!(et.visibility(), EntryVisibility::Public)
});
if is_public_entry {
self.send_op(element, make_store_entry).await?;
}
Ok(())
}
async fn send_register_add_link(self, element: Element) -> SysValidationResult<()> {
self.send_op(element, make_register_add_link).await
}
async fn send_register_agent_activity(self, element: Element) -> SysValidationResult<()> {
self.send_op(element, make_register_agent_activity).await
}
}
/// Where the element was found.
enum Source {
/// Locally because we are holding it or
/// because we will be soon
Local(Element),
/// On the network.
/// This means we aren't holding it so
/// we should add it to our incoming ops
Network(Element),
}
impl AsRef<Element> for Source {
fn as_ref(&self) -> &Element {
match self {
Source::Local(el) | Source::Network(el) => el,
}
}
}
/// Check if we are holding a dependency and
/// run a check callback on the it.
/// This function also returns where the dependency
/// was found so you can decide whether or not to add
/// it to the incoming ops.
async fn check_and_hold<I: Into<AnyDhtHash> + Clone>(
hash: &I,
workspace: &SysValidationWorkspace,
network: HolochainP2pDna,
) -> SysValidationResult<Source> {
let hash: AnyDhtHash = hash.clone().into();
// Create a workspace with just the local stores
let mut local_cascade = workspace.local_cascade();
if let Some(el) = local_cascade
.retrieve(hash.clone(), Default::default())
.await?
{
return Ok(Source::Local(el));
}
// Create a workspace with just the network
let mut network_only_cascade = workspace.full_cascade(network);
match network_only_cascade
.retrieve(hash.clone(), Default::default())
.await?
{
Some(el) => Ok(Source::Network(el.privatized())),
None => Err(ValidationOutcome::NotHoldingDep(hash).into()),
}
}
/// Make a StoreElement DhtOp from an Element.
/// Note that this can fail if the op is missing an
/// Entry when it was supposed to have one.
///
/// Because adding ops to incoming limbo while we are checking them
/// is only faster than waiting for them through gossip, we don't care enough
/// to return an error.
fn make_store_element(element: Element) -> Option<(DhtOpHash, DhtOp)> {
// Extract the data
let (shh, element_entry) = element.privatized().into_inner();
let (header, signature) = shh.into_inner();
let header = header.into_content();
// Check the entry
let maybe_entry_box = element_entry.into_option().map(Box::new);
// Create the hash and op
let op = DhtOp::StoreElement(signature, header, maybe_entry_box);
let hash = op.to_hash();
Some((hash, op))
}
/// Make a StoreEntry DhtOp from an Element.
/// Note that this can fail if the op is missing an Entry or
/// the header is the wrong type.
///
/// Because adding ops to incoming limbo while we are checking them
/// is only faster than waiting for them through gossip, we don't care enough
/// to return an error.
fn make_store_entry(element: Element) -> Option<(DhtOpHash, DhtOp)> {
// Extract the data
let (shh, element_entry) = element.into_inner();
let (header, signature) = shh.into_inner();
// Check the entry and exit early if it's not there
let entry_box = element_entry.into_option()?.into();
// If the header is the wrong type exit early
let header = header.into_content().try_into().ok()?;
// Create the hash and op
let op = DhtOp::StoreEntry(signature, header, entry_box);
let hash = op.to_hash();
Some((hash, op))
}
/// Make a RegisterAddLink DhtOp from an Element.
/// Note that this can fail if the header is the wrong type
///
/// Because adding ops to incoming limbo while we are checking them
/// is only faster than waiting for them through gossip, we don't care enough
/// to return an error.
fn make_register_add_link(element: Element) -> Option<(DhtOpHash, DhtOp)> {
// Extract the data
let (shh, _) = element.into_inner();
let (header, signature) = shh.into_inner();
// If the header is the wrong type exit early
let header = header.into_content().try_into().ok()?;
// Create the hash and op
let op = DhtOp::RegisterAddLink(signature, header);
let hash = op.to_hash();
Some((hash, op))
}
/// Make a RegisterAgentActivity DhtOp from an Element.
/// Note that this can fail if the header is the wrong type
///
/// Because adding ops to incoming limbo while we are checking them
/// is only faster than waiting for them through gossip, we don't care enough
/// to return an error.
fn make_register_agent_activity(element: Element) -> Option<(DhtOpHash, DhtOp)> {
// Extract the data
let (shh, _) = element.into_inner();
let (header, signature) = shh.into_inner();
// If the header is the wrong type exit early
let header = header.into_content();
// Create the hash and op
let op = DhtOp::RegisterAgentActivity(signature, header);
let hash = op.to_hash();
Some((hash, op))
}
#[cfg(test)]
pub mod test {
use super::check_countersigning_preflight_response_signature;
use crate::core::sys_validate::error::SysValidationError;
use crate::core::ValidationOutcome;
use arbitrary::Arbitrary;
use fixt::fixt;
use fixt::Predictable;
use hdk::prelude::AgentPubKeyFixturator;
use holochain_keystore::AgentPubKeyExt;
use holochain_state::test_utils::test_keystore;
use holochain_zome_types::countersigning::PreflightResponse;
use matches::assert_matches;
#[tokio::test(flavor = "multi_thread")]
pub async fn test_check_countersigning_preflight_response_signature() |
}
| {
let keystore = test_keystore();
let mut u = arbitrary::Unstructured::new(&[0; 1000]);
let mut preflight_response = PreflightResponse::arbitrary(&mut u).unwrap();
assert_matches!(
check_countersigning_preflight_response_signature(&preflight_response).await,
Err(SysValidationError::ValidationOutcome(
ValidationOutcome::PreflightResponseSignature(_)
))
);
let alice = fixt!(AgentPubKey, Predictable);
let bob = fixt!(AgentPubKey, Predictable, 1);
(*preflight_response.request_mut().signing_agents_mut()).push((alice.clone(), vec![]));
(*preflight_response.request_mut().signing_agents_mut()).push((bob, vec![]));
*preflight_response.signature_mut() = alice
.sign_raw(
&keystore,
preflight_response.encode_for_signature().unwrap().into(),
)
.await
.unwrap();
assert_eq!(
check_countersigning_preflight_response_signature(&preflight_response)
.await
.unwrap(),
(),
);
} |
backup.go | package cli
import (
"github.com/pkg/errors"
"github.com/replicatedhq/kots/pkg/print"
"github.com/replicatedhq/kots/pkg/snapshot"
"github.com/spf13/cobra"
"github.com/spf13/viper"
)
func BackupCmd() *cobra.Command {
cmd := &cobra.Command{
Use: "backup",
Short: "Provides wrapper functionality to interface with the backup source",
Long: ``,
SilenceUsage: true,
SilenceErrors: false,
PreRun: func(cmd *cobra.Command, args []string) {
viper.BindPFlags(cmd.Flags())
},
RunE: func(cmd *cobra.Command, args []string) error {
v := viper.GetViper()
namespace := v.GetString("namespace")
options := snapshot.CreateInstanceBackupOptions{
Namespace: namespace,
KubernetesConfigFlags: kubernetesConfigFlags,
Wait: v.GetBool("wait"),
}
if err := snapshot.CreateInstanceBackup(options); err != nil {
return errors.Wrap(err, "failed to create instance backup")
}
return nil
},
}
cmd.Flags().StringP("namespace", "n", "default", "namespace in which kots/kotsadm is installed")
cmd.Flags().Bool("wait", true, "wait for the backup to finish")
cmd.AddCommand(BackupListCmd())
return cmd
}
func | () *cobra.Command {
cmd := &cobra.Command{
Use: "ls",
Short: `List available instance backups (this command is deprecated, please use "kubectl kots get backups" instead)`,
Long: ``,
SilenceUsage: true,
SilenceErrors: false,
PreRun: func(cmd *cobra.Command, args []string) {
viper.BindPFlags(cmd.Flags())
},
RunE: func(cmd *cobra.Command, args []string) error {
v := viper.GetViper()
options := snapshot.ListInstanceBackupsOptions{
Namespace: v.GetString("namespace"),
}
backups, err := snapshot.ListInstanceBackups(options)
if err != nil {
return errors.Wrap(err, "failed to list instance backups")
}
print.Backups(backups)
return nil
},
}
cmd.Flags().StringP("namespace", "n", "", "filter by the namespace in which kots/kotsadm is installed")
return cmd
}
| BackupListCmd |
error.rs | // Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct CancelUpdateStackError {
pub kind: CancelUpdateStackErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum CancelUpdateStackErrorKind {
TokenAlreadyExistsError(crate::error::TokenAlreadyExistsError),
/// An unexpected error, eg. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for CancelUpdateStackError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
CancelUpdateStackErrorKind::TokenAlreadyExistsError(_inner) => _inner.fmt(f),
CancelUpdateStackErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for CancelUpdateStackError {
fn code(&self) -> Option<&str> {
CancelUpdateStackError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl CancelUpdateStackError {
pub fn new(kind: CancelUpdateStackErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: CancelUpdateStackErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: CancelUpdateStackErrorKind::Unhandled(err.into()),
}
}
// Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display as implemented
// by std::Error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_token_already_exists_error(&self) -> bool {
matches!(
&self.kind,
CancelUpdateStackErrorKind::TokenAlreadyExistsError(_)
)
}
}
impl std::error::Error for CancelUpdateStackError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
CancelUpdateStackErrorKind::TokenAlreadyExistsError(_inner) => Some(_inner),
CancelUpdateStackErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
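// Illustrative sketch (an assumption, not part of the generated file):
// inspecting a CancelUpdateStackError using only items defined above:
// is_token_already_exists_error(), message(), and the Display impl.
fn describe_cancel_update_stack_error(err: &CancelUpdateStackError) -> String {
    if err.is_token_already_exists_error() {
        // The client request token was already used; callers would typically
        // retry the operation with a fresh token.
        format!(
            "token already exists: {}",
            err.message().unwrap_or("<no message>")
        )
    } else {
        format!("unhandled CancelUpdateStack error: {}", err)
    }
}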
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct ContinueUpdateRollbackError {
pub kind: ContinueUpdateRollbackErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum ContinueUpdateRollbackErrorKind {
TokenAlreadyExistsError(crate::error::TokenAlreadyExistsError),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for ContinueUpdateRollbackError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
ContinueUpdateRollbackErrorKind::TokenAlreadyExistsError(_inner) => _inner.fmt(f),
ContinueUpdateRollbackErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for ContinueUpdateRollbackError {
fn code(&self) -> Option<&str> {
ContinueUpdateRollbackError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl ContinueUpdateRollbackError {
pub fn new(kind: ContinueUpdateRollbackErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: ContinueUpdateRollbackErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: ContinueUpdateRollbackErrorKind::Unhandled(err.into()),
}
}
// Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display as implemented
// by std::Error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_token_already_exists_error(&self) -> bool {
matches!(
&self.kind,
ContinueUpdateRollbackErrorKind::TokenAlreadyExistsError(_)
)
}
}
impl std::error::Error for ContinueUpdateRollbackError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
ContinueUpdateRollbackErrorKind::TokenAlreadyExistsError(_inner) => Some(_inner),
ContinueUpdateRollbackErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct CreateChangeSetError {
pub kind: CreateChangeSetErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum CreateChangeSetErrorKind {
AlreadyExistsError(crate::error::AlreadyExistsError),
InsufficientCapabilitiesError(crate::error::InsufficientCapabilitiesError),
LimitExceededError(crate::error::LimitExceededError),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for CreateChangeSetError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
CreateChangeSetErrorKind::AlreadyExistsError(_inner) => _inner.fmt(f),
CreateChangeSetErrorKind::InsufficientCapabilitiesError(_inner) => _inner.fmt(f),
CreateChangeSetErrorKind::LimitExceededError(_inner) => _inner.fmt(f),
CreateChangeSetErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for CreateChangeSetError {
fn code(&self) -> Option<&str> {
CreateChangeSetError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl CreateChangeSetError {
pub fn new(kind: CreateChangeSetErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: CreateChangeSetErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: CreateChangeSetErrorKind::Unhandled(err.into()),
}
}
// Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display as implemented
// by std::Error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_already_exists_error(&self) -> bool {
matches!(&self.kind, CreateChangeSetErrorKind::AlreadyExistsError(_))
}
pub fn is_insufficient_capabilities_error(&self) -> bool {
matches!(
&self.kind,
CreateChangeSetErrorKind::InsufficientCapabilitiesError(_)
)
}
pub fn is_limit_exceeded_error(&self) -> bool {
matches!(&self.kind, CreateChangeSetErrorKind::LimitExceededError(_))
}
}
impl std::error::Error for CreateChangeSetError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
CreateChangeSetErrorKind::AlreadyExistsError(_inner) => Some(_inner),
CreateChangeSetErrorKind::InsufficientCapabilitiesError(_inner) => Some(_inner),
CreateChangeSetErrorKind::LimitExceededError(_inner) => Some(_inner),
CreateChangeSetErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct CreateStackError {
pub kind: CreateStackErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum CreateStackErrorKind {
AlreadyExistsError(crate::error::AlreadyExistsError),
InsufficientCapabilitiesError(crate::error::InsufficientCapabilitiesError),
LimitExceededError(crate::error::LimitExceededError),
TokenAlreadyExistsError(crate::error::TokenAlreadyExistsError),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for CreateStackError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
CreateStackErrorKind::AlreadyExistsError(_inner) => _inner.fmt(f),
CreateStackErrorKind::InsufficientCapabilitiesError(_inner) => _inner.fmt(f),
CreateStackErrorKind::LimitExceededError(_inner) => _inner.fmt(f),
CreateStackErrorKind::TokenAlreadyExistsError(_inner) => _inner.fmt(f),
CreateStackErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for CreateStackError {
fn code(&self) -> Option<&str> {
CreateStackError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl CreateStackError {
pub fn new(kind: CreateStackErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: CreateStackErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: CreateStackErrorKind::Unhandled(err.into()),
}
}
// Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display as implemented
// by std::Error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_already_exists_error(&self) -> bool {
matches!(&self.kind, CreateStackErrorKind::AlreadyExistsError(_))
}
pub fn is_insufficient_capabilities_error(&self) -> bool {
matches!(
&self.kind,
CreateStackErrorKind::InsufficientCapabilitiesError(_)
)
}
pub fn is_limit_exceeded_error(&self) -> bool {
matches!(&self.kind, CreateStackErrorKind::LimitExceededError(_))
}
pub fn is_token_already_exists_error(&self) -> bool {
matches!(&self.kind, CreateStackErrorKind::TokenAlreadyExistsError(_))
}
}
impl std::error::Error for CreateStackError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
CreateStackErrorKind::AlreadyExistsError(_inner) => Some(_inner),
CreateStackErrorKind::InsufficientCapabilitiesError(_inner) => Some(_inner),
CreateStackErrorKind::LimitExceededError(_inner) => Some(_inner),
CreateStackErrorKind::TokenAlreadyExistsError(_inner) => Some(_inner),
CreateStackErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
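// Illustrative sketch (an assumption, not part of the generated file):
// classifying a CreateStackError by matching on its public `kind` field
// instead of the boolean predicates. The retry policy shown is hypothetical.
fn create_stack_error_is_retryable(err: &CreateStackError) -> bool {
    match &err.kind {
        // An account stack limit may clear once other stacks are deleted,
        // so a caller might reasonably retry later.
        CreateStackErrorKind::LimitExceededError(_) => true,
        // A stack name or client token that already exists will not
        // resolve on retry.
        CreateStackErrorKind::AlreadyExistsError(_)
        | CreateStackErrorKind::TokenAlreadyExistsError(_) => false,
        _ => false,
    }
}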
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct CreateStackInstancesError {
pub kind: CreateStackInstancesErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum CreateStackInstancesErrorKind {
InvalidOperationError(crate::error::InvalidOperationError),
LimitExceededError(crate::error::LimitExceededError),
OperationIdAlreadyExistsError(crate::error::OperationIdAlreadyExistsError),
OperationInProgressError(crate::error::OperationInProgressError),
StackSetNotFoundError(crate::error::StackSetNotFoundError),
StaleRequestError(crate::error::StaleRequestError),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for CreateStackInstancesError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
CreateStackInstancesErrorKind::InvalidOperationError(_inner) => _inner.fmt(f),
CreateStackInstancesErrorKind::LimitExceededError(_inner) => _inner.fmt(f),
CreateStackInstancesErrorKind::OperationIdAlreadyExistsError(_inner) => _inner.fmt(f),
CreateStackInstancesErrorKind::OperationInProgressError(_inner) => _inner.fmt(f),
CreateStackInstancesErrorKind::StackSetNotFoundError(_inner) => _inner.fmt(f),
CreateStackInstancesErrorKind::StaleRequestError(_inner) => _inner.fmt(f),
CreateStackInstancesErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for CreateStackInstancesError {
fn code(&self) -> Option<&str> {
CreateStackInstancesError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl CreateStackInstancesError {
pub fn new(kind: CreateStackInstancesErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: CreateStackInstancesErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: CreateStackInstancesErrorKind::Unhandled(err.into()),
}
}
// Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display as implemented
// by std::Error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_invalid_operation_error(&self) -> bool {
matches!(
&self.kind,
CreateStackInstancesErrorKind::InvalidOperationError(_)
)
}
pub fn is_limit_exceeded_error(&self) -> bool {
matches!(
&self.kind,
CreateStackInstancesErrorKind::LimitExceededError(_)
)
}
pub fn is_operation_id_already_exists_error(&self) -> bool {
matches!(
&self.kind,
CreateStackInstancesErrorKind::OperationIdAlreadyExistsError(_)
)
}
pub fn is_operation_in_progress_error(&self) -> bool {
matches!(
&self.kind,
CreateStackInstancesErrorKind::OperationInProgressError(_)
)
}
pub fn is_stack_set_not_found_error(&self) -> bool {
matches!(
&self.kind,
CreateStackInstancesErrorKind::StackSetNotFoundError(_)
)
}
pub fn is_stale_request_error(&self) -> bool {
matches!(
&self.kind,
CreateStackInstancesErrorKind::StaleRequestError(_)
)
}
}
impl std::error::Error for CreateStackInstancesError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
CreateStackInstancesErrorKind::InvalidOperationError(_inner) => Some(_inner),
CreateStackInstancesErrorKind::LimitExceededError(_inner) => Some(_inner),
CreateStackInstancesErrorKind::OperationIdAlreadyExistsError(_inner) => Some(_inner),
CreateStackInstancesErrorKind::OperationInProgressError(_inner) => Some(_inner),
CreateStackInstancesErrorKind::StackSetNotFoundError(_inner) => Some(_inner),
CreateStackInstancesErrorKind::StaleRequestError(_inner) => Some(_inner),
CreateStackInstancesErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct CreateStackSetError {
pub kind: CreateStackSetErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum CreateStackSetErrorKind {
CreatedButModifiedError(crate::error::CreatedButModifiedError),
LimitExceededError(crate::error::LimitExceededError),
NameAlreadyExistsError(crate::error::NameAlreadyExistsError),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for CreateStackSetError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
CreateStackSetErrorKind::CreatedButModifiedError(_inner) => _inner.fmt(f),
CreateStackSetErrorKind::LimitExceededError(_inner) => _inner.fmt(f),
CreateStackSetErrorKind::NameAlreadyExistsError(_inner) => _inner.fmt(f),
CreateStackSetErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for CreateStackSetError {
fn code(&self) -> Option<&str> {
CreateStackSetError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl CreateStackSetError {
pub fn new(kind: CreateStackSetErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: CreateStackSetErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: CreateStackSetErrorKind::Unhandled(err.into()),
}
}
// Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display as implemented
// by std::Error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_created_but_modified_error(&self) -> bool {
matches!(
&self.kind,
CreateStackSetErrorKind::CreatedButModifiedError(_)
)
}
pub fn is_limit_exceeded_error(&self) -> bool {
matches!(&self.kind, CreateStackSetErrorKind::LimitExceededError(_))
}
pub fn is_name_already_exists_error(&self) -> bool {
matches!(
&self.kind,
CreateStackSetErrorKind::NameAlreadyExistsError(_)
)
}
}
impl std::error::Error for CreateStackSetError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
CreateStackSetErrorKind::CreatedButModifiedError(_inner) => Some(_inner),
CreateStackSetErrorKind::LimitExceededError(_inner) => Some(_inner),
CreateStackSetErrorKind::NameAlreadyExistsError(_inner) => Some(_inner),
CreateStackSetErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DeleteChangeSetError {
pub kind: DeleteChangeSetErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DeleteChangeSetErrorKind {
InvalidChangeSetStatusError(crate::error::InvalidChangeSetStatusError),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for DeleteChangeSetError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
DeleteChangeSetErrorKind::InvalidChangeSetStatusError(_inner) => _inner.fmt(f),
DeleteChangeSetErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for DeleteChangeSetError {
fn code(&self) -> Option<&str> {
DeleteChangeSetError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl DeleteChangeSetError {
pub fn new(kind: DeleteChangeSetErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: DeleteChangeSetErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: DeleteChangeSetErrorKind::Unhandled(err.into()),
}
}
// Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display as implemented
// by std::Error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_invalid_change_set_status_error(&self) -> bool {
matches!(
&self.kind,
DeleteChangeSetErrorKind::InvalidChangeSetStatusError(_)
)
}
}
impl std::error::Error for DeleteChangeSetError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
DeleteChangeSetErrorKind::InvalidChangeSetStatusError(_inner) => Some(_inner),
DeleteChangeSetErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DeleteStackError {
pub kind: DeleteStackErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DeleteStackErrorKind {
TokenAlreadyExistsError(crate::error::TokenAlreadyExistsError),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for DeleteStackError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
DeleteStackErrorKind::TokenAlreadyExistsError(_inner) => _inner.fmt(f),
DeleteStackErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for DeleteStackError {
fn code(&self) -> Option<&str> {
DeleteStackError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl DeleteStackError {
pub fn new(kind: DeleteStackErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: DeleteStackErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: DeleteStackErrorKind::Unhandled(err.into()),
}
}
// Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display as implemented
// by std::Error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_token_already_exists_error(&self) -> bool {
matches!(&self.kind, DeleteStackErrorKind::TokenAlreadyExistsError(_))
}
}
impl std::error::Error for DeleteStackError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
DeleteStackErrorKind::TokenAlreadyExistsError(_inner) => Some(_inner),
DeleteStackErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DeleteStackInstancesError {
pub kind: DeleteStackInstancesErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DeleteStackInstancesErrorKind {
InvalidOperationError(crate::error::InvalidOperationError),
OperationIdAlreadyExistsError(crate::error::OperationIdAlreadyExistsError),
OperationInProgressError(crate::error::OperationInProgressError),
StackSetNotFoundError(crate::error::StackSetNotFoundError),
StaleRequestError(crate::error::StaleRequestError),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for DeleteStackInstancesError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
DeleteStackInstancesErrorKind::InvalidOperationError(_inner) => _inner.fmt(f),
DeleteStackInstancesErrorKind::OperationIdAlreadyExistsError(_inner) => _inner.fmt(f),
DeleteStackInstancesErrorKind::OperationInProgressError(_inner) => _inner.fmt(f),
DeleteStackInstancesErrorKind::StackSetNotFoundError(_inner) => _inner.fmt(f),
DeleteStackInstancesErrorKind::StaleRequestError(_inner) => _inner.fmt(f),
DeleteStackInstancesErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for DeleteStackInstancesError {
fn code(&self) -> Option<&str> {
DeleteStackInstancesError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl DeleteStackInstancesError {
pub fn new(kind: DeleteStackInstancesErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: DeleteStackInstancesErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: DeleteStackInstancesErrorKind::Unhandled(err.into()),
}
}
// Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display as implemented
// by std::Error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_invalid_operation_error(&self) -> bool {
matches!(
&self.kind,
DeleteStackInstancesErrorKind::InvalidOperationError(_)
)
}
pub fn is_operation_id_already_exists_error(&self) -> bool {
matches!(
&self.kind,
DeleteStackInstancesErrorKind::OperationIdAlreadyExistsError(_)
)
}
    pub fn is_operation_in_progress_error(&self) -> bool {
        matches!(
            &self.kind,
            DeleteStackInstancesErrorKind::OperationInProgressError(_)
        )
    }
pub fn is_stack_set_not_found_error(&self) -> bool {
matches!(
&self.kind,
DeleteStackInstancesErrorKind::StackSetNotFoundError(_)
)
}
pub fn is_stale_request_error(&self) -> bool {
matches!(
&self.kind,
DeleteStackInstancesErrorKind::StaleRequestError(_)
)
}
}
impl std::error::Error for DeleteStackInstancesError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
DeleteStackInstancesErrorKind::InvalidOperationError(_inner) => Some(_inner),
DeleteStackInstancesErrorKind::OperationIdAlreadyExistsError(_inner) => Some(_inner),
DeleteStackInstancesErrorKind::OperationInProgressError(_inner) => Some(_inner),
DeleteStackInstancesErrorKind::StackSetNotFoundError(_inner) => Some(_inner),
DeleteStackInstancesErrorKind::StaleRequestError(_inner) => Some(_inner),
DeleteStackInstancesErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DeleteStackSetError {
pub kind: DeleteStackSetErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DeleteStackSetErrorKind {
OperationInProgressError(crate::error::OperationInProgressError),
StackSetNotEmptyError(crate::error::StackSetNotEmptyError),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for DeleteStackSetError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
DeleteStackSetErrorKind::OperationInProgressError(_inner) => _inner.fmt(f),
DeleteStackSetErrorKind::StackSetNotEmptyError(_inner) => _inner.fmt(f),
DeleteStackSetErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for DeleteStackSetError {
fn code(&self) -> Option<&str> {
DeleteStackSetError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl DeleteStackSetError {
pub fn new(kind: DeleteStackSetErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: DeleteStackSetErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: DeleteStackSetErrorKind::Unhandled(err.into()),
}
}
// Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display as implemented
// by std::Error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_operation_in_progress_error(&self) -> bool {
matches!(
&self.kind,
DeleteStackSetErrorKind::OperationInProgressError(_)
)
}
pub fn is_stack_set_not_empty_error(&self) -> bool {
matches!(
&self.kind,
DeleteStackSetErrorKind::StackSetNotEmptyError(_)
)
}
}
impl std::error::Error for DeleteStackSetError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
DeleteStackSetErrorKind::OperationInProgressError(_inner) => Some(_inner),
DeleteStackSetErrorKind::StackSetNotEmptyError(_inner) => Some(_inner),
DeleteStackSetErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DeregisterTypeError {
pub kind: DeregisterTypeErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DeregisterTypeErrorKind {
CFNRegistryError(crate::error::CFNRegistryError),
TypeNotFoundError(crate::error::TypeNotFoundError),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for DeregisterTypeError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
DeregisterTypeErrorKind::CFNRegistryError(_inner) => _inner.fmt(f),
DeregisterTypeErrorKind::TypeNotFoundError(_inner) => _inner.fmt(f),
DeregisterTypeErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for DeregisterTypeError {
fn code(&self) -> Option<&str> {
DeregisterTypeError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl DeregisterTypeError {
pub fn new(kind: DeregisterTypeErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: DeregisterTypeErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: DeregisterTypeErrorKind::Unhandled(err.into()),
}
}
// Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display as implemented
// by std::Error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_cfn_registry_error(&self) -> bool {
matches!(&self.kind, DeregisterTypeErrorKind::CFNRegistryError(_))
}
pub fn is_type_not_found_error(&self) -> bool {
matches!(&self.kind, DeregisterTypeErrorKind::TypeNotFoundError(_))
}
}
impl std::error::Error for DeregisterTypeError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
DeregisterTypeErrorKind::CFNRegistryError(_inner) => Some(_inner),
DeregisterTypeErrorKind::TypeNotFoundError(_inner) => Some(_inner),
DeregisterTypeErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DescribeAccountLimitsError {
pub kind: DescribeAccountLimitsErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DescribeAccountLimitsErrorKind {
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for DescribeAccountLimitsError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
DescribeAccountLimitsErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for DescribeAccountLimitsError {
fn code(&self) -> Option<&str> {
DescribeAccountLimitsError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl DescribeAccountLimitsError {
pub fn new(kind: DescribeAccountLimitsErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: DescribeAccountLimitsErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: DescribeAccountLimitsErrorKind::Unhandled(err.into()),
}
}
// Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display as implemented
// by std::Error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
}
impl std::error::Error for DescribeAccountLimitsError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
DescribeAccountLimitsErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
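// Illustrative sketch (an assumption, not part of the generated file):
// wrapping an arbitrary error into the catch-all Unhandled variant via the
// generated `unhandled` constructor.
fn wrap_io_error(e: std::io::Error) -> DescribeAccountLimitsError {
    DescribeAccountLimitsError::unhandled(e)
}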
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DescribeChangeSetError {
pub kind: DescribeChangeSetErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DescribeChangeSetErrorKind {
ChangeSetNotFoundError(crate::error::ChangeSetNotFoundError),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for DescribeChangeSetError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
DescribeChangeSetErrorKind::ChangeSetNotFoundError(_inner) => _inner.fmt(f),
DescribeChangeSetErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for DescribeChangeSetError {
fn code(&self) -> Option<&str> {
DescribeChangeSetError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl DescribeChangeSetError {
pub fn new(kind: DescribeChangeSetErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: DescribeChangeSetErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: DescribeChangeSetErrorKind::Unhandled(err.into()),
}
}
// Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display as implemented
// by std::Error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_change_set_not_found_error(&self) -> bool {
matches!(
&self.kind,
DescribeChangeSetErrorKind::ChangeSetNotFoundError(_)
)
}
}
impl std::error::Error for DescribeChangeSetError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
DescribeChangeSetErrorKind::ChangeSetNotFoundError(_inner) => Some(_inner),
DescribeChangeSetErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DescribeStackDriftDetectionStatusError {
pub kind: DescribeStackDriftDetectionStatusErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DescribeStackDriftDetectionStatusErrorKind {
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for DescribeStackDriftDetectionStatusError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
DescribeStackDriftDetectionStatusErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for DescribeStackDriftDetectionStatusError {
fn code(&self) -> Option<&str> {
DescribeStackDriftDetectionStatusError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl DescribeStackDriftDetectionStatusError {
pub fn new(
kind: DescribeStackDriftDetectionStatusErrorKind,
meta: smithy_types::Error,
) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: DescribeStackDriftDetectionStatusErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: DescribeStackDriftDetectionStatusErrorKind::Unhandled(err.into()),
}
}
// Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display as implemented
// by std::Error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
}
impl std::error::Error for DescribeStackDriftDetectionStatusError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
DescribeStackDriftDetectionStatusErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DescribeStackEventsError {
pub kind: DescribeStackEventsErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DescribeStackEventsErrorKind {
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for DescribeStackEventsError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
DescribeStackEventsErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for DescribeStackEventsError {
fn code(&self) -> Option<&str> {
DescribeStackEventsError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl DescribeStackEventsError {
pub fn new(kind: DescribeStackEventsErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: DescribeStackEventsErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: DescribeStackEventsErrorKind::Unhandled(err.into()),
}
}
// Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display as implemented
// by std::Error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
}
impl std::error::Error for DescribeStackEventsError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
DescribeStackEventsErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DescribeStackInstanceError {
pub kind: DescribeStackInstanceErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DescribeStackInstanceErrorKind {
StackInstanceNotFoundError(crate::error::StackInstanceNotFoundError),
StackSetNotFoundError(crate::error::StackSetNotFoundError),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for DescribeStackInstanceError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
DescribeStackInstanceErrorKind::StackInstanceNotFoundError(_inner) => _inner.fmt(f),
DescribeStackInstanceErrorKind::StackSetNotFoundError(_inner) => _inner.fmt(f),
DescribeStackInstanceErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for DescribeStackInstanceError {
fn code(&self) -> Option<&str> {
DescribeStackInstanceError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl DescribeStackInstanceError {
pub fn new(kind: DescribeStackInstanceErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: DescribeStackInstanceErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: DescribeStackInstanceErrorKind::Unhandled(err.into()),
}
}
// Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display as implemented
// by std::Error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_stack_instance_not_found_error(&self) -> bool {
matches!(
&self.kind,
DescribeStackInstanceErrorKind::StackInstanceNotFoundError(_)
)
}
pub fn is_stack_set_not_found_error(&self) -> bool {
matches!(
&self.kind,
DescribeStackInstanceErrorKind::StackSetNotFoundError(_)
)
}
}
impl std::error::Error for DescribeStackInstanceError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
DescribeStackInstanceErrorKind::StackInstanceNotFoundError(_inner) => Some(_inner),
DescribeStackInstanceErrorKind::StackSetNotFoundError(_inner) => Some(_inner),
DescribeStackInstanceErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DescribeStackResourceError {
pub kind: DescribeStackResourceErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DescribeStackResourceErrorKind {
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for DescribeStackResourceError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
DescribeStackResourceErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for DescribeStackResourceError {
fn code(&self) -> Option<&str> {
DescribeStackResourceError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl DescribeStackResourceError {
pub fn new(kind: DescribeStackResourceErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: DescribeStackResourceErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: DescribeStackResourceErrorKind::Unhandled(err.into()),
}
}
// Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display as implemented
// by std::Error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
}
impl std::error::Error for DescribeStackResourceError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
DescribeStackResourceErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DescribeStackResourceDriftsError {
pub kind: DescribeStackResourceDriftsErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DescribeStackResourceDriftsErrorKind {
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for DescribeStackResourceDriftsError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
DescribeStackResourceDriftsErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for DescribeStackResourceDriftsError {
fn code(&self) -> Option<&str> {
DescribeStackResourceDriftsError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl DescribeStackResourceDriftsError {
pub fn new(kind: DescribeStackResourceDriftsErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: DescribeStackResourceDriftsErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: DescribeStackResourceDriftsErrorKind::Unhandled(err.into()),
}
}
// Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display as implemented
// by std::Error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
}
impl std::error::Error for DescribeStackResourceDriftsError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
DescribeStackResourceDriftsErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DescribeStackResourcesError {
pub kind: DescribeStackResourcesErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DescribeStackResourcesErrorKind {
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for DescribeStackResourcesError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
DescribeStackResourcesErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for DescribeStackResourcesError {
fn code(&self) -> Option<&str> {
DescribeStackResourcesError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl DescribeStackResourcesError {
pub fn new(kind: DescribeStackResourcesErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: DescribeStackResourcesErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: DescribeStackResourcesErrorKind::Unhandled(err.into()),
}
}
// Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display as implemented
// by std::Error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
}
impl std::error::Error for DescribeStackResourcesError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
DescribeStackResourcesErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DescribeStacksError {
pub kind: DescribeStacksErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DescribeStacksErrorKind {
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for DescribeStacksError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
DescribeStacksErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for DescribeStacksError {
fn code(&self) -> Option<&str> {
DescribeStacksError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl DescribeStacksError {
pub fn new(kind: DescribeStacksErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: DescribeStacksErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: DescribeStacksErrorKind::Unhandled(err.into()),
}
}
// Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display as implemented
// by std::Error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
}
impl std::error::Error for DescribeStacksError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
DescribeStacksErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DescribeStackSetError {
pub kind: DescribeStackSetErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DescribeStackSetErrorKind {
StackSetNotFoundError(crate::error::StackSetNotFoundError),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for DescribeStackSetError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
DescribeStackSetErrorKind::StackSetNotFoundError(_inner) => _inner.fmt(f),
DescribeStackSetErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for DescribeStackSetError {
fn code(&self) -> Option<&str> {
DescribeStackSetError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl DescribeStackSetError {
pub fn new(kind: DescribeStackSetErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: DescribeStackSetErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: DescribeStackSetErrorKind::Unhandled(err.into()),
}
}
// Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display as implemented
// by std::Error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_stack_set_not_found_error(&self) -> bool {
matches!(
&self.kind,
DescribeStackSetErrorKind::StackSetNotFoundError(_)
)
}
}
impl std::error::Error for DescribeStackSetError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
DescribeStackSetErrorKind::StackSetNotFoundError(_inner) => Some(_inner),
DescribeStackSetErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DescribeStackSetOperationError {
pub kind: DescribeStackSetOperationErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DescribeStackSetOperationErrorKind {
OperationNotFoundError(crate::error::OperationNotFoundError),
StackSetNotFoundError(crate::error::StackSetNotFoundError),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for DescribeStackSetOperationError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
DescribeStackSetOperationErrorKind::OperationNotFoundError(_inner) => _inner.fmt(f),
DescribeStackSetOperationErrorKind::StackSetNotFoundError(_inner) => _inner.fmt(f),
DescribeStackSetOperationErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for DescribeStackSetOperationError {
fn code(&self) -> Option<&str> {
DescribeStackSetOperationError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl DescribeStackSetOperationError {
pub fn new(kind: DescribeStackSetOperationErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: DescribeStackSetOperationErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: DescribeStackSetOperationErrorKind::Unhandled(err.into()),
}
}
// Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display as implemented
// by std::Error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_operation_not_found_error(&self) -> bool {
matches!(
&self.kind,
DescribeStackSetOperationErrorKind::OperationNotFoundError(_)
)
}
pub fn is_stack_set_not_found_error(&self) -> bool {
matches!(
&self.kind,
DescribeStackSetOperationErrorKind::StackSetNotFoundError(_)
)
}
}
impl std::error::Error for DescribeStackSetOperationError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
DescribeStackSetOperationErrorKind::OperationNotFoundError(_inner) => Some(_inner),
DescribeStackSetOperationErrorKind::StackSetNotFoundError(_inner) => Some(_inner),
DescribeStackSetOperationErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DescribeTypeError {
pub kind: DescribeTypeErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DescribeTypeErrorKind {
CFNRegistryError(crate::error::CFNRegistryError),
TypeNotFoundError(crate::error::TypeNotFoundError),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for DescribeTypeError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
DescribeTypeErrorKind::CFNRegistryError(_inner) => _inner.fmt(f),
DescribeTypeErrorKind::TypeNotFoundError(_inner) => _inner.fmt(f),
DescribeTypeErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for DescribeTypeError {
fn code(&self) -> Option<&str> {
DescribeTypeError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl DescribeTypeError {
pub fn new(kind: DescribeTypeErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: DescribeTypeErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: DescribeTypeErrorKind::Unhandled(err.into()),
}
}
// Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display as implemented
// by std::Error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_cfn_registry_error(&self) -> bool {
matches!(&self.kind, DescribeTypeErrorKind::CFNRegistryError(_))
}
pub fn is_type_not_found_error(&self) -> bool {
matches!(&self.kind, DescribeTypeErrorKind::TypeNotFoundError(_))
}
}
impl std::error::Error for DescribeTypeError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
DescribeTypeErrorKind::CFNRegistryError(_inner) => Some(_inner),
DescribeTypeErrorKind::TypeNotFoundError(_inner) => Some(_inner),
DescribeTypeErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DescribeTypeRegistrationError {
pub kind: DescribeTypeRegistrationErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DescribeTypeRegistrationErrorKind {
CFNRegistryError(crate::error::CFNRegistryError),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for DescribeTypeRegistrationError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
DescribeTypeRegistrationErrorKind::CFNRegistryError(_inner) => _inner.fmt(f),
DescribeTypeRegistrationErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for DescribeTypeRegistrationError {
fn code(&self) -> Option<&str> {
DescribeTypeRegistrationError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl DescribeTypeRegistrationError {
pub fn new(kind: DescribeTypeRegistrationErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: DescribeTypeRegistrationErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: DescribeTypeRegistrationErrorKind::Unhandled(err.into()),
}
}
    // TODO: consider returning `Option<Cow<'_, str>>`; that would let the `Display`
    // implementation (via `std::error::Error`) generate a message when none is set.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_cfn_registry_error(&self) -> bool {
matches!(
&self.kind,
DescribeTypeRegistrationErrorKind::CFNRegistryError(_)
)
}
}
impl std::error::Error for DescribeTypeRegistrationError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
DescribeTypeRegistrationErrorKind::CFNRegistryError(_inner) => Some(_inner),
DescribeTypeRegistrationErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DetectStackDriftError {
pub kind: DetectStackDriftErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DetectStackDriftErrorKind {
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for DetectStackDriftError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
DetectStackDriftErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for DetectStackDriftError {
fn code(&self) -> Option<&str> {
DetectStackDriftError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl DetectStackDriftError {
pub fn new(kind: DetectStackDriftErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: DetectStackDriftErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: DetectStackDriftErrorKind::Unhandled(err.into()),
}
}
    // TODO: consider returning `Option<Cow<'_, str>>`; that would let the `Display`
    // implementation (via `std::error::Error`) generate a message when none is set.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
}
impl std::error::Error for DetectStackDriftError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
DetectStackDriftErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DetectStackResourceDriftError {
pub kind: DetectStackResourceDriftErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DetectStackResourceDriftErrorKind {
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for DetectStackResourceDriftError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
DetectStackResourceDriftErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for DetectStackResourceDriftError {
fn code(&self) -> Option<&str> {
DetectStackResourceDriftError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl DetectStackResourceDriftError {
pub fn new(kind: DetectStackResourceDriftErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: DetectStackResourceDriftErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: DetectStackResourceDriftErrorKind::Unhandled(err.into()),
}
}
    // TODO: consider returning `Option<Cow<'_, str>>`; that would let the `Display`
    // implementation (via `std::error::Error`) generate a message when none is set.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
}
impl std::error::Error for DetectStackResourceDriftError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
DetectStackResourceDriftErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DetectStackSetDriftError {
pub kind: DetectStackSetDriftErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DetectStackSetDriftErrorKind {
InvalidOperationError(crate::error::InvalidOperationError),
OperationInProgressError(crate::error::OperationInProgressError),
StackSetNotFoundError(crate::error::StackSetNotFoundError),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for DetectStackSetDriftError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
DetectStackSetDriftErrorKind::InvalidOperationError(_inner) => _inner.fmt(f),
DetectStackSetDriftErrorKind::OperationInProgressError(_inner) => _inner.fmt(f),
DetectStackSetDriftErrorKind::StackSetNotFoundError(_inner) => _inner.fmt(f),
DetectStackSetDriftErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for DetectStackSetDriftError {
fn code(&self) -> Option<&str> {
DetectStackSetDriftError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl DetectStackSetDriftError {
pub fn new(kind: DetectStackSetDriftErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: DetectStackSetDriftErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: DetectStackSetDriftErrorKind::Unhandled(err.into()),
}
}
    // TODO: consider returning `Option<Cow<'_, str>>`; that would let the `Display`
    // implementation (via `std::error::Error`) generate a message when none is set.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_invalid_operation_error(&self) -> bool {
matches!(
&self.kind,
DetectStackSetDriftErrorKind::InvalidOperationError(_)
)
}
pub fn is_operation_in_progress_error(&self) -> bool {
matches!(
&self.kind,
DetectStackSetDriftErrorKind::OperationInProgressError(_)
)
}
pub fn is_stack_set_not_found_error(&self) -> bool {
matches!(
&self.kind,
DetectStackSetDriftErrorKind::StackSetNotFoundError(_)
)
}
}
impl std::error::Error for DetectStackSetDriftError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
DetectStackSetDriftErrorKind::InvalidOperationError(_inner) => Some(_inner),
DetectStackSetDriftErrorKind::OperationInProgressError(_inner) => Some(_inner),
DetectStackSetDriftErrorKind::StackSetNotFoundError(_inner) => Some(_inner),
DetectStackSetDriftErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
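// Illustrative usage sketch (not part of the generated API): of the kinds
// modeled for `DetectStackSetDriftError`, only `OperationInProgressError` is
// typically transient, so a caller might treat it as the lone retryable case.
// The helper below is a hypothetical example.
#[allow(dead_code)]
fn detect_stack_set_drift_is_retryable(err: &DetectStackSetDriftError) -> bool {
    // Another stack set operation is still running; waiting and retrying
    // later may succeed, whereas the other kinds are terminal.
    err.is_operation_in_progress_error()
}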
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct EstimateTemplateCostError {
pub kind: EstimateTemplateCostErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum EstimateTemplateCostErrorKind {
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for EstimateTemplateCostError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
EstimateTemplateCostErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for EstimateTemplateCostError {
fn code(&self) -> Option<&str> {
EstimateTemplateCostError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl EstimateTemplateCostError {
pub fn new(kind: EstimateTemplateCostErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: EstimateTemplateCostErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: EstimateTemplateCostErrorKind::Unhandled(err.into()),
}
}
    // TODO: consider returning `Option<Cow<'_, str>>`; that would let the `Display`
    // implementation (via `std::error::Error`) generate a message when none is set.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
}
impl std::error::Error for EstimateTemplateCostError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
EstimateTemplateCostErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct ExecuteChangeSetError {
pub kind: ExecuteChangeSetErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum ExecuteChangeSetErrorKind {
ChangeSetNotFoundError(crate::error::ChangeSetNotFoundError),
InsufficientCapabilitiesError(crate::error::InsufficientCapabilitiesError),
InvalidChangeSetStatusError(crate::error::InvalidChangeSetStatusError),
TokenAlreadyExistsError(crate::error::TokenAlreadyExistsError),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for ExecuteChangeSetError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
ExecuteChangeSetErrorKind::ChangeSetNotFoundError(_inner) => _inner.fmt(f),
ExecuteChangeSetErrorKind::InsufficientCapabilitiesError(_inner) => _inner.fmt(f),
ExecuteChangeSetErrorKind::InvalidChangeSetStatusError(_inner) => _inner.fmt(f),
ExecuteChangeSetErrorKind::TokenAlreadyExistsError(_inner) => _inner.fmt(f),
ExecuteChangeSetErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for ExecuteChangeSetError {
fn code(&self) -> Option<&str> {
ExecuteChangeSetError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl ExecuteChangeSetError {
pub fn new(kind: ExecuteChangeSetErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: ExecuteChangeSetErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: ExecuteChangeSetErrorKind::Unhandled(err.into()),
}
}
    // TODO: consider returning `Option<Cow<'_, str>>`; that would let the `Display`
    // implementation (via `std::error::Error`) generate a message when none is set.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_change_set_not_found_error(&self) -> bool {
matches!(
&self.kind,
ExecuteChangeSetErrorKind::ChangeSetNotFoundError(_)
)
}
pub fn is_insufficient_capabilities_error(&self) -> bool {
matches!(
&self.kind,
ExecuteChangeSetErrorKind::InsufficientCapabilitiesError(_)
)
}
pub fn is_invalid_change_set_status_error(&self) -> bool {
matches!(
&self.kind,
ExecuteChangeSetErrorKind::InvalidChangeSetStatusError(_)
)
}
pub fn is_token_already_exists_error(&self) -> bool {
matches!(
&self.kind,
ExecuteChangeSetErrorKind::TokenAlreadyExistsError(_)
)
}
}
impl std::error::Error for ExecuteChangeSetError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
ExecuteChangeSetErrorKind::ChangeSetNotFoundError(_inner) => Some(_inner),
ExecuteChangeSetErrorKind::InsufficientCapabilitiesError(_inner) => Some(_inner),
ExecuteChangeSetErrorKind::InvalidChangeSetStatusError(_inner) => Some(_inner),
ExecuteChangeSetErrorKind::TokenAlreadyExistsError(_inner) => Some(_inner),
ExecuteChangeSetErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
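// Illustrative usage sketch (not part of the generated API): because these
// error types implement `std::error::Error`, the standard `source()` chain can
// be walked, e.g. to build a log-friendly list of causes. The helper name is
// invented for this example.
#[allow(dead_code)]
fn execute_change_set_error_chain(err: &ExecuteChangeSetError) -> Vec<String> {
    let mut messages = vec![err.to_string()];
    let mut source = std::error::Error::source(err);
    while let Some(cause) = source {
        messages.push(cause.to_string());
        source = cause.source();
    }
    messages
}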
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct GetStackPolicyError {
pub kind: GetStackPolicyErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum GetStackPolicyErrorKind {
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for GetStackPolicyError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
GetStackPolicyErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for GetStackPolicyError {
fn code(&self) -> Option<&str> {
GetStackPolicyError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl GetStackPolicyError {
pub fn new(kind: GetStackPolicyErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: GetStackPolicyErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: GetStackPolicyErrorKind::Unhandled(err.into()),
}
}
    // TODO: consider returning `Option<Cow<'_, str>>`; that would let the `Display`
    // implementation (via `std::error::Error`) generate a message when none is set.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
}
impl std::error::Error for GetStackPolicyError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
GetStackPolicyErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct GetTemplateError {
pub kind: GetTemplateErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum GetTemplateErrorKind {
ChangeSetNotFoundError(crate::error::ChangeSetNotFoundError),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for GetTemplateError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
GetTemplateErrorKind::ChangeSetNotFoundError(_inner) => _inner.fmt(f),
GetTemplateErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for GetTemplateError {
fn code(&self) -> Option<&str> {
GetTemplateError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl GetTemplateError {
pub fn new(kind: GetTemplateErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: GetTemplateErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: GetTemplateErrorKind::Unhandled(err.into()),
}
}
    // TODO: consider returning `Option<Cow<'_, str>>`; that would let the `Display`
    // implementation (via `std::error::Error`) generate a message when none is set.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_change_set_not_found_error(&self) -> bool {
matches!(&self.kind, GetTemplateErrorKind::ChangeSetNotFoundError(_))
}
}
impl std::error::Error for GetTemplateError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
GetTemplateErrorKind::ChangeSetNotFoundError(_inner) => Some(_inner),
GetTemplateErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct GetTemplateSummaryError {
pub kind: GetTemplateSummaryErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum GetTemplateSummaryErrorKind {
StackSetNotFoundError(crate::error::StackSetNotFoundError),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for GetTemplateSummaryError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
GetTemplateSummaryErrorKind::StackSetNotFoundError(_inner) => _inner.fmt(f),
GetTemplateSummaryErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for GetTemplateSummaryError {
fn code(&self) -> Option<&str> {
GetTemplateSummaryError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl GetTemplateSummaryError {
pub fn new(kind: GetTemplateSummaryErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: GetTemplateSummaryErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: GetTemplateSummaryErrorKind::Unhandled(err.into()),
}
}
    // TODO: consider returning `Option<Cow<'_, str>>`; that would let the `Display`
    // implementation (via `std::error::Error`) generate a message when none is set.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_stack_set_not_found_error(&self) -> bool {
matches!(
&self.kind,
GetTemplateSummaryErrorKind::StackSetNotFoundError(_)
)
}
}
impl std::error::Error for GetTemplateSummaryError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
GetTemplateSummaryErrorKind::StackSetNotFoundError(_inner) => Some(_inner),
GetTemplateSummaryErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
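// Illustrative usage sketch (not part of the generated API): the `meta`-backed
// accessors expose the service's error code, message, and request id uniformly
// across operations, which is convenient for structured logging. The format
// string and helper name below are invented for this example.
#[allow(dead_code)]
fn get_template_summary_error_log_line(err: &GetTemplateSummaryError) -> String {
    format!(
        "GetTemplateSummary failed: code={:?} message={:?} request_id={:?}",
        err.code(),
        err.message(),
        err.request_id()
    )
}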
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct ListChangeSetsError {
pub kind: ListChangeSetsErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum ListChangeSetsErrorKind {
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for ListChangeSetsError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
ListChangeSetsErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for ListChangeSetsError {
fn code(&self) -> Option<&str> {
ListChangeSetsError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl ListChangeSetsError {
pub fn new(kind: ListChangeSetsErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: ListChangeSetsErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: ListChangeSetsErrorKind::Unhandled(err.into()),
}
}
    // TODO: consider returning `Option<Cow<'_, str>>`; that would let the `Display`
    // implementation (via `std::error::Error`) generate a message when none is set.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
}
impl std::error::Error for ListChangeSetsError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
ListChangeSetsErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct ListExportsError {
pub kind: ListExportsErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum ListExportsErrorKind {
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for ListExportsError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
ListExportsErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for ListExportsError {
fn code(&self) -> Option<&str> {
ListExportsError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl ListExportsError {
pub fn new(kind: ListExportsErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: ListExportsErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: ListExportsErrorKind::Unhandled(err.into()),
}
}
    // TODO: consider returning `Option<Cow<'_, str>>`; that would let the `Display`
    // implementation (via `std::error::Error`) generate a message when none is set.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
}
impl std::error::Error for ListExportsError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
ListExportsErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct ListImportsError {
pub kind: ListImportsErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum ListImportsErrorKind {
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for ListImportsError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
ListImportsErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for ListImportsError {
fn code(&self) -> Option<&str> {
ListImportsError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl ListImportsError {
pub fn new(kind: ListImportsErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: ListImportsErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: ListImportsErrorKind::Unhandled(err.into()),
}
}
    // TODO: consider returning `Option<Cow<'_, str>>`; that would let the `Display`
    // implementation (via `std::error::Error`) generate a message when none is set.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
}
impl std::error::Error for ListImportsError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
ListImportsErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct ListStackInstancesError {
pub kind: ListStackInstancesErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum ListStackInstancesErrorKind {
StackSetNotFoundError(crate::error::StackSetNotFoundError),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for ListStackInstancesError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
ListStackInstancesErrorKind::StackSetNotFoundError(_inner) => _inner.fmt(f),
ListStackInstancesErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for ListStackInstancesError {
fn code(&self) -> Option<&str> {
ListStackInstancesError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl ListStackInstancesError {
pub fn new(kind: ListStackInstancesErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: ListStackInstancesErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: ListStackInstancesErrorKind::Unhandled(err.into()),
}
}
    // TODO: consider returning `Option<Cow<'_, str>>`; that would let the `Display`
    // implementation (via `std::error::Error`) generate a message when none is set.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_stack_set_not_found_error(&self) -> bool {
matches!(
&self.kind,
ListStackInstancesErrorKind::StackSetNotFoundError(_)
)
}
}
impl std::error::Error for ListStackInstancesError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
ListStackInstancesErrorKind::StackSetNotFoundError(_inner) => Some(_inner),
ListStackInstancesErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
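// Illustrative usage sketch (not part of the generated API): the
// `ProvideErrorKind` impl above never reports a retryable kind for this
// operation, so retry middleware would have to classify failures from the
// error code alone. The helper name is invented for this example.
#[allow(dead_code)]
fn list_stack_instances_retry_hint(
    err: &ListStackInstancesError,
) -> Option<smithy_types::retry::ErrorKind> {
    use smithy_types::retry::ProvideErrorKind;
    // Always `None` here; see `retryable_error_kind` in the impl above.
    err.retryable_error_kind()
}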
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct ListStackResourcesError {
pub kind: ListStackResourcesErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum ListStackResourcesErrorKind {
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for ListStackResourcesError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
ListStackResourcesErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for ListStackResourcesError {
fn code(&self) -> Option<&str> {
ListStackResourcesError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl ListStackResourcesError {
pub fn new(kind: ListStackResourcesErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: ListStackResourcesErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: ListStackResourcesErrorKind::Unhandled(err.into()),
}
}
    // TODO: consider returning `Option<Cow<'_, str>>`; that would let the `Display`
    // implementation (via `std::error::Error`) generate a message when none is set.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
}
impl std::error::Error for ListStackResourcesError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
ListStackResourcesErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct ListStacksError {
pub kind: ListStacksErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum ListStacksErrorKind {
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for ListStacksError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
ListStacksErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for ListStacksError {
fn code(&self) -> Option<&str> {
ListStacksError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl ListStacksError {
pub fn new(kind: ListStacksErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: ListStacksErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: ListStacksErrorKind::Unhandled(err.into()),
}
}
    // TODO: consider returning `Option<Cow<'_, str>>`; that would let the `Display`
    // implementation (via `std::error::Error`) generate a message when none is set.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
}
impl std::error::Error for ListStacksError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
ListStacksErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct ListStackSetOperationResultsError {
pub kind: ListStackSetOperationResultsErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum ListStackSetOperationResultsErrorKind {
OperationNotFoundError(crate::error::OperationNotFoundError),
StackSetNotFoundError(crate::error::StackSetNotFoundError),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for ListStackSetOperationResultsError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
ListStackSetOperationResultsErrorKind::OperationNotFoundError(_inner) => _inner.fmt(f),
ListStackSetOperationResultsErrorKind::StackSetNotFoundError(_inner) => _inner.fmt(f),
ListStackSetOperationResultsErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for ListStackSetOperationResultsError {
fn code(&self) -> Option<&str> {
ListStackSetOperationResultsError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl ListStackSetOperationResultsError {
pub fn new(kind: ListStackSetOperationResultsErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: ListStackSetOperationResultsErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: ListStackSetOperationResultsErrorKind::Unhandled(err.into()),
}
}
    // TODO: consider returning `Option<Cow<'_, str>>`; that would let the `Display`
    // implementation (via `std::error::Error`) generate a message when none is set.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_operation_not_found_error(&self) -> bool {
matches!(
&self.kind,
ListStackSetOperationResultsErrorKind::OperationNotFoundError(_)
)
}
pub fn is_stack_set_not_found_error(&self) -> bool {
matches!(
&self.kind,
ListStackSetOperationResultsErrorKind::StackSetNotFoundError(_)
)
}
}
impl std::error::Error for ListStackSetOperationResultsError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
ListStackSetOperationResultsErrorKind::OperationNotFoundError(_inner) => Some(_inner),
ListStackSetOperationResultsErrorKind::StackSetNotFoundError(_inner) => Some(_inner),
ListStackSetOperationResultsErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct ListStackSetOperationsError {
pub kind: ListStackSetOperationsErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum ListStackSetOperationsErrorKind {
StackSetNotFoundError(crate::error::StackSetNotFoundError),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for ListStackSetOperationsError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
ListStackSetOperationsErrorKind::StackSetNotFoundError(_inner) => _inner.fmt(f),
ListStackSetOperationsErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for ListStackSetOperationsError {
fn code(&self) -> Option<&str> {
ListStackSetOperationsError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl ListStackSetOperationsError {
pub fn new(kind: ListStackSetOperationsErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: ListStackSetOperationsErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: ListStackSetOperationsErrorKind::Unhandled(err.into()),
}
}
    // TODO: consider returning `Option<Cow<'_, str>>`; that would let the `Display`
    // implementation (via `std::error::Error`) generate a message when none is set.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_stack_set_not_found_error(&self) -> bool {
matches!(
&self.kind,
ListStackSetOperationsErrorKind::StackSetNotFoundError(_)
)
}
}
impl std::error::Error for ListStackSetOperationsError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
ListStackSetOperationsErrorKind::StackSetNotFoundError(_inner) => Some(_inner),
ListStackSetOperationsErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct ListStackSetsError {
pub kind: ListStackSetsErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum ListStackSetsErrorKind {
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for ListStackSetsError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
ListStackSetsErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for ListStackSetsError {
fn code(&self) -> Option<&str> {
ListStackSetsError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl ListStackSetsError {
pub fn new(kind: ListStackSetsErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: ListStackSetsErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: ListStackSetsErrorKind::Unhandled(err.into()),
}
}
    // TODO: consider returning `Option<Cow<'_, str>>`; that would let the `Display`
    // implementation (via `std::error::Error`) generate a message when none is set.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
}
impl std::error::Error for ListStackSetsError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
ListStackSetsErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct ListTypeRegistrationsError {
pub kind: ListTypeRegistrationsErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum ListTypeRegistrationsErrorKind {
CFNRegistryError(crate::error::CFNRegistryError),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for ListTypeRegistrationsError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
ListTypeRegistrationsErrorKind::CFNRegistryError(_inner) => _inner.fmt(f),
ListTypeRegistrationsErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for ListTypeRegistrationsError {
fn code(&self) -> Option<&str> {
ListTypeRegistrationsError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl ListTypeRegistrationsError {
pub fn new(kind: ListTypeRegistrationsErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: ListTypeRegistrationsErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: ListTypeRegistrationsErrorKind::Unhandled(err.into()),
}
}
    // TODO: consider returning `Option<Cow<'_, str>>`; that would let the `Display`
    // implementation (via `std::error::Error`) generate a message when none is set.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_cfn_registry_error(&self) -> bool {
matches!(
&self.kind,
ListTypeRegistrationsErrorKind::CFNRegistryError(_)
)
}
}
impl std::error::Error for ListTypeRegistrationsError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
ListTypeRegistrationsErrorKind::CFNRegistryError(_inner) => Some(_inner),
ListTypeRegistrationsErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct ListTypesError {
pub kind: ListTypesErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum ListTypesErrorKind {
CFNRegistryError(crate::error::CFNRegistryError),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for ListTypesError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
ListTypesErrorKind::CFNRegistryError(_inner) => _inner.fmt(f),
ListTypesErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for ListTypesError {
fn code(&self) -> Option<&str> {
ListTypesError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl ListTypesError {
pub fn new(kind: ListTypesErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: ListTypesErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: ListTypesErrorKind::Unhandled(err.into()),
}
}
    // TODO: consider returning `Option<Cow<'_, str>>`; that would let the `Display`
    // implementation (via `std::error::Error`) generate a message when none is set.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_cfn_registry_error(&self) -> bool {
matches!(&self.kind, ListTypesErrorKind::CFNRegistryError(_))
}
}
impl std::error::Error for ListTypesError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
ListTypesErrorKind::CFNRegistryError(_inner) => Some(_inner),
ListTypesErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct ListTypeVersionsError {
pub kind: ListTypeVersionsErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum ListTypeVersionsErrorKind {
CFNRegistryError(crate::error::CFNRegistryError),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for ListTypeVersionsError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
ListTypeVersionsErrorKind::CFNRegistryError(_inner) => _inner.fmt(f),
ListTypeVersionsErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for ListTypeVersionsError {
fn code(&self) -> Option<&str> {
ListTypeVersionsError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl ListTypeVersionsError {
pub fn new(kind: ListTypeVersionsErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: ListTypeVersionsErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: ListTypeVersionsErrorKind::Unhandled(err.into()),
}
}
    // TODO: consider returning `Option<Cow<'_, str>>`; that would let the `Display`
    // implementation (via `std::error::Error`) generate a message when none is set.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_cfn_registry_error(&self) -> bool {
matches!(&self.kind, ListTypeVersionsErrorKind::CFNRegistryError(_))
}
}
impl std::error::Error for ListTypeVersionsError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
ListTypeVersionsErrorKind::CFNRegistryError(_inner) => Some(_inner),
ListTypeVersionsErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct RecordHandlerProgressError {
pub kind: RecordHandlerProgressErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum RecordHandlerProgressErrorKind {
InvalidStateTransitionError(crate::error::InvalidStateTransitionError),
OperationStatusCheckFailedError(crate::error::OperationStatusCheckFailedError),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for RecordHandlerProgressError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
RecordHandlerProgressErrorKind::InvalidStateTransitionError(_inner) => _inner.fmt(f),
RecordHandlerProgressErrorKind::OperationStatusCheckFailedError(_inner) => {
_inner.fmt(f)
}
RecordHandlerProgressErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for RecordHandlerProgressError {
fn code(&self) -> Option<&str> {
RecordHandlerProgressError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl RecordHandlerProgressError {
pub fn new(kind: RecordHandlerProgressErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: RecordHandlerProgressErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: RecordHandlerProgressErrorKind::Unhandled(err.into()),
}
}
    // TODO: consider returning `Option<Cow<'_, str>>`; that would let the `Display`
    // implementation (via `std::error::Error`) generate a message when none is set.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_invalid_state_transition_error(&self) -> bool {
matches!(
&self.kind,
RecordHandlerProgressErrorKind::InvalidStateTransitionError(_)
)
}
pub fn is_operation_status_check_failed_error(&self) -> bool {
matches!(
&self.kind,
RecordHandlerProgressErrorKind::OperationStatusCheckFailedError(_)
)
}
}
impl std::error::Error for RecordHandlerProgressError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
RecordHandlerProgressErrorKind::InvalidStateTransitionError(_inner) => Some(_inner),
RecordHandlerProgressErrorKind::OperationStatusCheckFailedError(_inner) => Some(_inner),
RecordHandlerProgressErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
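// Illustrative usage sketch (not part of the generated API): matching on the
// kind enum directly is an alternative to the `is_*` predicates. Note that the
// enum is `#[non_exhaustive]`, so code in other crates would also need a
// wildcard arm. The handler bodies here are placeholders.
#[allow(dead_code)]
fn handle_record_handler_progress_error(err: &RecordHandlerProgressError) {
    match &err.kind {
        RecordHandlerProgressErrorKind::InvalidStateTransitionError(_inner) => {
            // The requested state transition is not allowed; fix the caller.
        }
        RecordHandlerProgressErrorKind::OperationStatusCheckFailedError(_inner) => {
            // The operation status check failed; inspect the operation state.
        }
        RecordHandlerProgressErrorKind::Unhandled(_inner) => {
            // Unmodeled error; surface `err.message()` / `err.code()` instead.
        }
    }
}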
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct RegisterTypeError {
pub kind: RegisterTypeErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum RegisterTypeErrorKind {
CFNRegistryError(crate::error::CFNRegistryError),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for RegisterTypeError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
RegisterTypeErrorKind::CFNRegistryError(_inner) => _inner.fmt(f),
RegisterTypeErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for RegisterTypeError {
fn code(&self) -> Option<&str> {
RegisterTypeError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl RegisterTypeError {
pub fn new(kind: RegisterTypeErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: RegisterTypeErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: RegisterTypeErrorKind::Unhandled(err.into()),
}
}
    // TODO: consider returning `Option<Cow<'_, str>>`; that would let the `Display`
    // implementation (via `std::error::Error`) generate a message when none is set.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_cfn_registry_error(&self) -> bool {
matches!(&self.kind, RegisterTypeErrorKind::CFNRegistryError(_))
}
}
impl std::error::Error for RegisterTypeError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
RegisterTypeErrorKind::CFNRegistryError(_inner) => Some(_inner),
RegisterTypeErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct SetStackPolicyError {
pub kind: SetStackPolicyErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum SetStackPolicyErrorKind {
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for SetStackPolicyError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
SetStackPolicyErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for SetStackPolicyError {
fn code(&self) -> Option<&str> {
SetStackPolicyError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl SetStackPolicyError {
pub fn new(kind: SetStackPolicyErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: SetStackPolicyErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: SetStackPolicyErrorKind::Unhandled(err.into()),
}
}
    // TODO: consider returning `Option<Cow<'_, str>>`; that would let the `Display`
    // implementation (via `std::error::Error`) generate a message when none is set.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
}
impl std::error::Error for SetStackPolicyError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
SetStackPolicyErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct SetTypeDefaultVersionError {
pub kind: SetTypeDefaultVersionErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum SetTypeDefaultVersionErrorKind {
CFNRegistryError(crate::error::CFNRegistryError),
TypeNotFoundError(crate::error::TypeNotFoundError),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for SetTypeDefaultVersionError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
SetTypeDefaultVersionErrorKind::CFNRegistryError(_inner) => _inner.fmt(f),
SetTypeDefaultVersionErrorKind::TypeNotFoundError(_inner) => _inner.fmt(f),
SetTypeDefaultVersionErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for SetTypeDefaultVersionError {
fn code(&self) -> Option<&str> {
SetTypeDefaultVersionError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl SetTypeDefaultVersionError {
pub fn new(kind: SetTypeDefaultVersionErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: SetTypeDefaultVersionErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: SetTypeDefaultVersionErrorKind::Unhandled(err.into()),
}
}
    // TODO: consider returning `Option<Cow<'_, str>>`; that would let the `Display`
    // implementation (via `std::error::Error`) generate a message when none is set.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_cfn_registry_error(&self) -> bool {
matches!(
&self.kind,
SetTypeDefaultVersionErrorKind::CFNRegistryError(_)
)
}
pub fn is_type_not_found_error(&self) -> bool {
matches!(
&self.kind,
SetTypeDefaultVersionErrorKind::TypeNotFoundError(_)
)
}
}
impl std::error::Error for SetTypeDefaultVersionError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
SetTypeDefaultVersionErrorKind::CFNRegistryError(_inner) => Some(_inner),
SetTypeDefaultVersionErrorKind::TypeNotFoundError(_inner) => Some(_inner),
SetTypeDefaultVersionErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct SignalResourceError {
pub kind: SignalResourceErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum SignalResourceErrorKind {
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for SignalResourceError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
SignalResourceErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for SignalResourceError {
fn code(&self) -> Option<&str> {
SignalResourceError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl SignalResourceError {
pub fn new(kind: SignalResourceErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: SignalResourceErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: SignalResourceErrorKind::Unhandled(err.into()),
}
}
    // TODO: consider returning `Option<Cow<'_, str>>`; that would let the `Display`
    // implementation (via `std::error::Error`) generate a message when none is set.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
}
impl std::error::Error for SignalResourceError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
SignalResourceErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
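// Illustrative usage sketch (not part of the generated API): `unhandled` and
// `generic` are the two constructors normally used outside of response
// deserialization. The error text below is a made-up example payload.
#[allow(dead_code)]
fn example_signal_resource_errors() -> (SignalResourceError, SignalResourceError) {
    // From any boxed error (here, a plain string converts via `Into`)...
    let from_unhandled = SignalResourceError::unhandled("connection reset");
    // ...or from generic error metadata, which is preserved in `meta`.
    let from_generic = SignalResourceError::generic(smithy_types::Error::default());
    (from_unhandled, from_generic)
}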
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct StopStackSetOperationError {
pub kind: StopStackSetOperationErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum StopStackSetOperationErrorKind {
InvalidOperationError(crate::error::InvalidOperationError),
OperationNotFoundError(crate::error::OperationNotFoundError),
StackSetNotFoundError(crate::error::StackSetNotFoundError),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for StopStackSetOperationError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
StopStackSetOperationErrorKind::InvalidOperationError(_inner) => _inner.fmt(f),
StopStackSetOperationErrorKind::OperationNotFoundError(_inner) => _inner.fmt(f),
StopStackSetOperationErrorKind::StackSetNotFoundError(_inner) => _inner.fmt(f),
StopStackSetOperationErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for StopStackSetOperationError {
fn code(&self) -> Option<&str> {
StopStackSetOperationError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl StopStackSetOperationError {
pub fn new(kind: StopStackSetOperationErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: StopStackSetOperationErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: StopStackSetOperationErrorKind::Unhandled(err.into()),
}
}
    // TODO: consider whether this should return `Option<Cow<'_, str>>`; that would
    // let callers fall back to the `Display` impl required by `std::error::Error`
    // when no message was modeled.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_invalid_operation_error(&self) -> bool {
matches!(
&self.kind,
StopStackSetOperationErrorKind::InvalidOperationError(_)
)
}
pub fn is_operation_not_found_error(&self) -> bool {
matches!(
&self.kind,
StopStackSetOperationErrorKind::OperationNotFoundError(_)
)
}
pub fn is_stack_set_not_found_error(&self) -> bool {
matches!(
&self.kind,
StopStackSetOperationErrorKind::StackSetNotFoundError(_)
)
}
}
impl std::error::Error for StopStackSetOperationError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
StopStackSetOperationErrorKind::InvalidOperationError(_inner) => Some(_inner),
StopStackSetOperationErrorKind::OperationNotFoundError(_inner) => Some(_inner),
StopStackSetOperationErrorKind::StackSetNotFoundError(_inner) => Some(_inner),
StopStackSetOperationErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
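// Illustrative sketch, not generated code: matching on the error kind directly.
// Inside this crate the match below is exhaustive; code in downstream crates
// would also need a wildcard `_` arm because the enum is `#[non_exhaustive]`.
#[allow(dead_code)]
fn describe_stop_stack_set_operation_error(err: &StopStackSetOperationError) -> String {
    match &err.kind {
        StopStackSetOperationErrorKind::InvalidOperationError(e) => {
            format!("invalid operation: {}", e.message().unwrap_or("no detail"))
        }
        StopStackSetOperationErrorKind::OperationNotFoundError(e) => {
            format!("operation not found: {}", e.message().unwrap_or("no detail"))
        }
        StopStackSetOperationErrorKind::StackSetNotFoundError(e) => {
            format!("stack set not found: {}", e.message().unwrap_or("no detail"))
        }
        StopStackSetOperationErrorKind::Unhandled(e) => format!("unhandled: {}", e),
    }
}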
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct UpdateStackError {
pub kind: UpdateStackErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum UpdateStackErrorKind {
InsufficientCapabilitiesError(crate::error::InsufficientCapabilitiesError),
TokenAlreadyExistsError(crate::error::TokenAlreadyExistsError),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for UpdateStackError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
UpdateStackErrorKind::InsufficientCapabilitiesError(_inner) => _inner.fmt(f),
UpdateStackErrorKind::TokenAlreadyExistsError(_inner) => _inner.fmt(f),
UpdateStackErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for UpdateStackError {
fn code(&self) -> Option<&str> {
UpdateStackError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl UpdateStackError {
pub fn new(kind: UpdateStackErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: UpdateStackErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: UpdateStackErrorKind::Unhandled(err.into()),
}
}
    // TODO: consider whether this should return `Option<Cow<'_, str>>`; that would
    // let callers fall back to the `Display` impl required by `std::error::Error`
    // when no message was modeled.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_insufficient_capabilities_error(&self) -> bool {
matches!(
&self.kind,
UpdateStackErrorKind::InsufficientCapabilitiesError(_)
)
}
pub fn is_token_already_exists_error(&self) -> bool {
matches!(&self.kind, UpdateStackErrorKind::TokenAlreadyExistsError(_))
}
}
impl std::error::Error for UpdateStackError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
UpdateStackErrorKind::InsufficientCapabilitiesError(_inner) => Some(_inner),
UpdateStackErrorKind::TokenAlreadyExistsError(_inner) => Some(_inner),
UpdateStackErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
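// Illustrative sketch, not generated code: every error wrapper in this module
// forwards `message()`, `code()`, and `request_id()` from its `meta` field, which
// is usually enough for structured logging without inspecting `kind` at all.
#[allow(dead_code)]
fn log_update_stack_failure(err: &UpdateStackError) {
    eprintln!(
        "UpdateStack failed: code={:?} request_id={:?} message={:?}",
        err.code(),
        err.request_id(),
        err.message()
    );
}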
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct UpdateStackInstancesError {
pub kind: UpdateStackInstancesErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum UpdateStackInstancesErrorKind {
InvalidOperationError(crate::error::InvalidOperationError),
OperationIdAlreadyExistsError(crate::error::OperationIdAlreadyExistsError),
OperationInProgressError(crate::error::OperationInProgressError),
StackInstanceNotFoundError(crate::error::StackInstanceNotFoundError),
StackSetNotFoundError(crate::error::StackSetNotFoundError),
StaleRequestError(crate::error::StaleRequestError),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for UpdateStackInstancesError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
UpdateStackInstancesErrorKind::InvalidOperationError(_inner) => _inner.fmt(f),
UpdateStackInstancesErrorKind::OperationIdAlreadyExistsError(_inner) => _inner.fmt(f),
UpdateStackInstancesErrorKind::OperationInProgressError(_inner) => _inner.fmt(f),
UpdateStackInstancesErrorKind::StackInstanceNotFoundError(_inner) => _inner.fmt(f),
UpdateStackInstancesErrorKind::StackSetNotFoundError(_inner) => _inner.fmt(f),
UpdateStackInstancesErrorKind::StaleRequestError(_inner) => _inner.fmt(f),
UpdateStackInstancesErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for UpdateStackInstancesError {
fn code(&self) -> Option<&str> {
UpdateStackInstancesError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl UpdateStackInstancesError {
pub fn new(kind: UpdateStackInstancesErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: UpdateStackInstancesErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: UpdateStackInstancesErrorKind::Unhandled(err.into()),
}
}
    // TODO: consider whether this should return `Option<Cow<'_, str>>`; that would
    // let callers fall back to the `Display` impl required by `std::error::Error`
    // when no message was modeled.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_invalid_operation_error(&self) -> bool {
matches!(
&self.kind,
UpdateStackInstancesErrorKind::InvalidOperationError(_)
)
}
pub fn is_operation_id_already_exists_error(&self) -> bool {
matches!(
&self.kind,
UpdateStackInstancesErrorKind::OperationIdAlreadyExistsError(_)
)
}
pub fn is_operation_in_progress_error(&self) -> bool {
matches!(
&self.kind,
UpdateStackInstancesErrorKind::OperationInProgressError(_)
)
}
pub fn is_stack_instance_not_found_error(&self) -> bool {
matches!(
&self.kind,
UpdateStackInstancesErrorKind::StackInstanceNotFoundError(_)
)
}
pub fn is_stack_set_not_found_error(&self) -> bool {
matches!(
&self.kind,
UpdateStackInstancesErrorKind::StackSetNotFoundError(_)
)
}
pub fn is_stale_request_error(&self) -> bool {
matches!(
&self.kind,
UpdateStackInstancesErrorKind::StaleRequestError(_)
)
}
}
impl std::error::Error for UpdateStackInstancesError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
UpdateStackInstancesErrorKind::InvalidOperationError(_inner) => Some(_inner),
UpdateStackInstancesErrorKind::OperationIdAlreadyExistsError(_inner) => Some(_inner),
UpdateStackInstancesErrorKind::OperationInProgressError(_inner) => Some(_inner),
UpdateStackInstancesErrorKind::StackInstanceNotFoundError(_inner) => Some(_inner),
UpdateStackInstancesErrorKind::StackSetNotFoundError(_inner) => Some(_inner),
UpdateStackInstancesErrorKind::StaleRequestError(_inner) => Some(_inner),
UpdateStackInstancesErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct UpdateStackSetError {
pub kind: UpdateStackSetErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum UpdateStackSetErrorKind {
InvalidOperationError(crate::error::InvalidOperationError),
OperationIdAlreadyExistsError(crate::error::OperationIdAlreadyExistsError),
OperationInProgressError(crate::error::OperationInProgressError),
StackInstanceNotFoundError(crate::error::StackInstanceNotFoundError),
StackSetNotFoundError(crate::error::StackSetNotFoundError),
StaleRequestError(crate::error::StaleRequestError),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for UpdateStackSetError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
UpdateStackSetErrorKind::InvalidOperationError(_inner) => _inner.fmt(f),
UpdateStackSetErrorKind::OperationIdAlreadyExistsError(_inner) => _inner.fmt(f),
UpdateStackSetErrorKind::OperationInProgressError(_inner) => _inner.fmt(f),
UpdateStackSetErrorKind::StackInstanceNotFoundError(_inner) => _inner.fmt(f),
UpdateStackSetErrorKind::StackSetNotFoundError(_inner) => _inner.fmt(f),
UpdateStackSetErrorKind::StaleRequestError(_inner) => _inner.fmt(f),
UpdateStackSetErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for UpdateStackSetError {
fn code(&self) -> Option<&str> {
UpdateStackSetError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl UpdateStackSetError {
pub fn new(kind: UpdateStackSetErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: UpdateStackSetErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: UpdateStackSetErrorKind::Unhandled(err.into()),
}
}
    // TODO: consider whether this should return `Option<Cow<'_, str>>`; that would
    // let callers fall back to the `Display` impl required by `std::error::Error`
    // when no message was modeled.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_invalid_operation_error(&self) -> bool {
matches!(
&self.kind,
UpdateStackSetErrorKind::InvalidOperationError(_)
)
}
pub fn is_operation_id_already_exists_error(&self) -> bool {
matches!(
&self.kind,
UpdateStackSetErrorKind::OperationIdAlreadyExistsError(_)
)
}
pub fn is_operation_in_progress_error(&self) -> bool {
matches!(
&self.kind,
UpdateStackSetErrorKind::OperationInProgressError(_)
)
}
pub fn is_stack_instance_not_found_error(&self) -> bool {
matches!(
&self.kind,
UpdateStackSetErrorKind::StackInstanceNotFoundError(_)
)
}
pub fn is_stack_set_not_found_error(&self) -> bool {
matches!(
&self.kind,
UpdateStackSetErrorKind::StackSetNotFoundError(_)
)
}
pub fn is_stale_request_error(&self) -> bool {
matches!(&self.kind, UpdateStackSetErrorKind::StaleRequestError(_))
}
}
impl std::error::Error for UpdateStackSetError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
UpdateStackSetErrorKind::InvalidOperationError(_inner) => Some(_inner),
UpdateStackSetErrorKind::OperationIdAlreadyExistsError(_inner) => Some(_inner),
UpdateStackSetErrorKind::OperationInProgressError(_inner) => Some(_inner),
UpdateStackSetErrorKind::StackInstanceNotFoundError(_inner) => Some(_inner),
UpdateStackSetErrorKind::StackSetNotFoundError(_inner) => Some(_inner),
UpdateStackSetErrorKind::StaleRequestError(_inner) => Some(_inner),
UpdateStackSetErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
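// Illustrative sketch, not generated code: the `source()` impls above let generic
// reporting code walk the cause chain without naming any CloudFormation type.
#[allow(dead_code)]
fn print_error_chain(err: &dyn std::error::Error) {
    eprintln!("error: {}", err);
    let mut source = err.source();
    while let Some(cause) = source {
        eprintln!("caused by: {}", cause);
        source = cause.source();
    }
}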
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct UpdateTerminationProtectionError {
pub kind: UpdateTerminationProtectionErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum UpdateTerminationProtectionErrorKind {
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for UpdateTerminationProtectionError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
UpdateTerminationProtectionErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for UpdateTerminationProtectionError {
fn code(&self) -> Option<&str> {
UpdateTerminationProtectionError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl UpdateTerminationProtectionError {
pub fn new(kind: UpdateTerminationProtectionErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: UpdateTerminationProtectionErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: UpdateTerminationProtectionErrorKind::Unhandled(err.into()),
}
}
    // TODO: consider whether this should return `Option<Cow<'_, str>>`; that would
    // let callers fall back to the `Display` impl required by `std::error::Error`
    // when no message was modeled.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
}
impl std::error::Error for UpdateTerminationProtectionError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
UpdateTerminationProtectionErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct ValidateTemplateError {
pub kind: ValidateTemplateErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum ValidateTemplateErrorKind {
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code.
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for ValidateTemplateError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
ValidateTemplateErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for ValidateTemplateError {
fn code(&self) -> Option<&str> {
ValidateTemplateError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl ValidateTemplateError {
pub fn new(kind: ValidateTemplateErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: ValidateTemplateErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: ValidateTemplateErrorKind::Unhandled(err.into()),
}
}
    // TODO: consider whether this should return `Option<Cow<'_, str>>`; that would
    // let callers fall back to the `Display` impl required by `std::error::Error`
    // when no message was modeled.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
}
impl std::error::Error for ValidateTemplateError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
ValidateTemplateErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
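// Illustrative sketch, not generated code: operations with no modeled errors,
// such as ValidateTemplate, still surface service details through `generic`,
// which keeps the raw `smithy_types::Error` both as `meta` (so `message()` and
// `code()` keep working) and as the boxed `Unhandled` cause.
#[allow(dead_code)]
fn wrap_generic_validate_template_error(raw: smithy_types::Error) -> ValidateTemplateError {
    ValidateTemplateError::generic(raw)
}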
/// <p>Another operation has been performed on this stack set since the specified operation
/// was performed. </p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct StaleRequestError {
pub message: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for StaleRequestError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("StaleRequestError");
formatter.field("message", &self.message);
formatter.finish()
}
}
impl StaleRequestError {
pub fn message(&self) -> Option<&str> {
self.message.as_deref()
}
}
impl std::fmt::Display for StaleRequestError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "StaleRequestError [StaleRequestException]")?;
if let Some(inner_1) = &self.message {
write!(f, ": {}", inner_1)?;
}
Ok(())
}
}
impl std::error::Error for StaleRequestError {}
/// See [`StaleRequestError`](crate::error::StaleRequestError)
pub mod stale_request_error {
/// A builder for [`StaleRequestError`](crate::error::StaleRequestError)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) message: std::option::Option<std::string::String>,
}
impl Builder {
pub fn message(mut self, input: impl Into<std::string::String>) -> Self {
self.message = Some(input.into());
self
}
pub fn set_message(mut self, input: std::option::Option<std::string::String>) -> Self {
self.message = input;
self
}
/// Consumes the builder and constructs a [`StaleRequestError`](crate::error::StaleRequestError)
pub fn build(self) -> crate::error::StaleRequestError {
crate::error::StaleRequestError {
message: self.message,
}
}
}
}
impl StaleRequestError {
/// Creates a new builder-style object to manufacture [`StaleRequestError`](crate::error::StaleRequestError)
pub fn builder() -> crate::error::stale_request_error::Builder {
crate::error::stale_request_error::Builder::default()
}
}
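// Illustrative sketch, not generated code: constructing a `StaleRequestError` by
// hand through its builder, e.g. to exercise error-handling paths in tests.
#[allow(dead_code)]
fn sample_stale_request_error() -> StaleRequestError {
    StaleRequestError::builder()
        .message("operation superseded by a newer request")
        .build()
}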
/// <p>The specified stack set doesn't exist.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct StackSetNotFoundError {
pub message: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for StackSetNotFoundError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("StackSetNotFoundError");
formatter.field("message", &self.message);
formatter.finish()
}
}
impl StackSetNotFoundError {
pub fn message(&self) -> Option<&str> {
self.message.as_deref()
}
}
impl std::fmt::Display for StackSetNotFoundError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "StackSetNotFoundError [StackSetNotFoundException]")?;
if let Some(inner_2) = &self.message {
write!(f, ": {}", inner_2)?;
}
Ok(())
}
}
impl std::error::Error for StackSetNotFoundError {}
/// See [`StackSetNotFoundError`](crate::error::StackSetNotFoundError)
pub mod stack_set_not_found_error {
/// A builder for [`StackSetNotFoundError`](crate::error::StackSetNotFoundError)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) message: std::option::Option<std::string::String>,
}
impl Builder {
pub fn message(mut self, input: impl Into<std::string::String>) -> Self {
self.message = Some(input.into());
self
}
pub fn set_message(mut self, input: std::option::Option<std::string::String>) -> Self {
self.message = input;
self
}
/// Consumes the builder and constructs a [`StackSetNotFoundError`](crate::error::StackSetNotFoundError)
pub fn build(self) -> crate::error::StackSetNotFoundError {
crate::error::StackSetNotFoundError {
message: self.message,
}
}
}
}
impl StackSetNotFoundError {
/// Creates a new builder-style object to manufacture [`StackSetNotFoundError`](crate::error::StackSetNotFoundError)
pub fn builder() -> crate::error::stack_set_not_found_error::Builder {
crate::error::stack_set_not_found_error::Builder::default()
}
}
/// <p>The specified stack instance doesn't exist.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct StackInstanceNotFoundError {
pub message: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for StackInstanceNotFoundError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("StackInstanceNotFoundError");
formatter.field("message", &self.message);
formatter.finish()
}
}
impl StackInstanceNotFoundError {
pub fn message(&self) -> Option<&str> {
self.message.as_deref()
}
}
impl std::fmt::Display for StackInstanceNotFoundError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(
f,
"StackInstanceNotFoundError [StackInstanceNotFoundException]"
)?;
if let Some(inner_3) = &self.message {
write!(f, ": {}", inner_3)?;
}
Ok(())
}
}
impl std::error::Error for StackInstanceNotFoundError {}
/// See [`StackInstanceNotFoundError`](crate::error::StackInstanceNotFoundError)
pub mod stack_instance_not_found_error {
/// A builder for [`StackInstanceNotFoundError`](crate::error::StackInstanceNotFoundError)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) message: std::option::Option<std::string::String>,
}
impl Builder {
pub fn message(mut self, input: impl Into<std::string::String>) -> Self {
self.message = Some(input.into());
self
}
pub fn set_message(mut self, input: std::option::Option<std::string::String>) -> Self {
self.message = input;
self
}
/// Consumes the builder and constructs a [`StackInstanceNotFoundError`](crate::error::StackInstanceNotFoundError)
pub fn build(self) -> crate::error::StackInstanceNotFoundError {
crate::error::StackInstanceNotFoundError {
message: self.message,
}
}
}
}
impl StackInstanceNotFoundError {
/// Creates a new builder-style object to manufacture [`StackInstanceNotFoundError`](crate::error::StackInstanceNotFoundError)
pub fn builder() -> crate::error::stack_instance_not_found_error::Builder {
crate::error::stack_instance_not_found_error::Builder::default()
}
}
/// <p>Another operation is currently in progress for this stack set. Only one operation can
/// be performed for a stack set at a given time.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct OperationInProgressError {
pub message: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for OperationInProgressError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("OperationInProgressError");
formatter.field("message", &self.message);
formatter.finish()
}
}
impl OperationInProgressError {
pub fn message(&self) -> Option<&str> {
self.message.as_deref()
}
}
impl std::fmt::Display for OperationInProgressError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "OperationInProgressError [OperationInProgressException]")?;
if let Some(inner_4) = &self.message {
write!(f, ": {}", inner_4)?;
}
Ok(())
}
}
impl std::error::Error for OperationInProgressError {}
/// See [`OperationInProgressError`](crate::error::OperationInProgressError)
pub mod operation_in_progress_error {
/// A builder for [`OperationInProgressError`](crate::error::OperationInProgressError)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) message: std::option::Option<std::string::String>,
}
impl Builder {
pub fn message(mut self, input: impl Into<std::string::String>) -> Self {
self.message = Some(input.into());
self
}
pub fn set_message(mut self, input: std::option::Option<std::string::String>) -> Self {
self.message = input;
self
}
/// Consumes the builder and constructs a [`OperationInProgressError`](crate::error::OperationInProgressError)
pub fn build(self) -> crate::error::OperationInProgressError {
crate::error::OperationInProgressError {
message: self.message,
}
}
}
}
impl OperationInProgressError {
/// Creates a new builder-style object to manufacture [`OperationInProgressError`](crate::error::OperationInProgressError)
pub fn builder() -> crate::error::operation_in_progress_error::Builder {
crate::error::operation_in_progress_error::Builder::default()
}
}
/// <p>The specified operation ID already exists.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct OperationIdAlreadyExistsError {
pub message: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for OperationIdAlreadyExistsError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("OperationIdAlreadyExistsError");
formatter.field("message", &self.message);
formatter.finish()
}
}
impl OperationIdAlreadyExistsError {
pub fn message(&self) -> Option<&str> {
self.message.as_deref()
}
}
impl std::fmt::Display for OperationIdAlreadyExistsError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(
f,
"OperationIdAlreadyExistsError [OperationIdAlreadyExistsException]"
)?;
if let Some(inner_5) = &self.message {
write!(f, ": {}", inner_5)?;
}
Ok(())
}
}
impl std::error::Error for OperationIdAlreadyExistsError {}
/// See [`OperationIdAlreadyExistsError`](crate::error::OperationIdAlreadyExistsError)
pub mod operation_id_already_exists_error {
/// A builder for [`OperationIdAlreadyExistsError`](crate::error::OperationIdAlreadyExistsError)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) message: std::option::Option<std::string::String>,
}
impl Builder {
pub fn message(mut self, input: impl Into<std::string::String>) -> Self {
self.message = Some(input.into());
self
}
pub fn set_message(mut self, input: std::option::Option<std::string::String>) -> Self {
self.message = input;
self
}
/// Consumes the builder and constructs a [`OperationIdAlreadyExistsError`](crate::error::OperationIdAlreadyExistsError)
pub fn build(self) -> crate::error::OperationIdAlreadyExistsError {
crate::error::OperationIdAlreadyExistsError {
message: self.message,
}
}
}
}
impl OperationIdAlreadyExistsError {
/// Creates a new builder-style object to manufacture [`OperationIdAlreadyExistsError`](crate::error::OperationIdAlreadyExistsError)
pub fn builder() -> crate::error::operation_id_already_exists_error::Builder {
crate::error::operation_id_already_exists_error::Builder::default()
}
}
/// <p>The specified operation isn't valid.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct InvalidOperationError {
pub message: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for InvalidOperationError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("InvalidOperationError");
formatter.field("message", &self.message);
formatter.finish()
}
}
impl InvalidOperationError {
pub fn message(&self) -> Option<&str> {
self.message.as_deref()
}
}
impl std::fmt::Display for InvalidOperationError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "InvalidOperationError [InvalidOperationException]")?;
if let Some(inner_6) = &self.message {
write!(f, ": {}", inner_6)?;
}
Ok(())
}
}
impl std::error::Error for InvalidOperationError {}
/// See [`InvalidOperationError`](crate::error::InvalidOperationError)
pub mod invalid_operation_error {
/// A builder for [`InvalidOperationError`](crate::error::InvalidOperationError)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) message: std::option::Option<std::string::String>,
}
impl Builder {
pub fn message(mut self, input: impl Into<std::string::String>) -> Self {
self.message = Some(input.into());
self
}
pub fn set_message(mut self, input: std::option::Option<std::string::String>) -> Self {
self.message = input;
self
}
/// Consumes the builder and constructs a [`InvalidOperationError`](crate::error::InvalidOperationError)
pub fn build(self) -> crate::error::InvalidOperationError {
crate::error::InvalidOperationError {
message: self.message,
}
}
}
}
impl InvalidOperationError {
/// Creates a new builder-style object to manufacture [`InvalidOperationError`](crate::error::InvalidOperationError)
pub fn builder() -> crate::error::invalid_operation_error::Builder {
crate::error::invalid_operation_error::Builder::default()
}
}
/// <p>A client request token already exists.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct TokenAlreadyExistsError {
pub message: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for TokenAlreadyExistsError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("TokenAlreadyExistsError");
formatter.field("message", &self.message);
formatter.finish()
}
}
impl TokenAlreadyExistsError {
pub fn message(&self) -> Option<&str> {
self.message.as_deref()
}
}
impl std::fmt::Display for TokenAlreadyExistsError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "TokenAlreadyExistsError [TokenAlreadyExistsException]")?;
if let Some(inner_7) = &self.message {
write!(f, ": {}", inner_7)?;
}
Ok(())
}
}
impl std::error::Error for TokenAlreadyExistsError {}
/// See [`TokenAlreadyExistsError`](crate::error::TokenAlreadyExistsError)
pub mod token_already_exists_error {
/// A builder for [`TokenAlreadyExistsError`](crate::error::TokenAlreadyExistsError)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) message: std::option::Option<std::string::String>,
}
impl Builder {
pub fn message(mut self, input: impl Into<std::string::String>) -> Self {
self.message = Some(input.into());
self
}
pub fn set_message(mut self, input: std::option::Option<std::string::String>) -> Self {
self.message = input;
self
}
/// Consumes the builder and constructs a [`TokenAlreadyExistsError`](crate::error::TokenAlreadyExistsError)
pub fn build(self) -> crate::error::TokenAlreadyExistsError {
crate::error::TokenAlreadyExistsError {
message: self.message,
}
}
}
}
impl TokenAlreadyExistsError {
/// Creates a new builder-style object to manufacture [`TokenAlreadyExistsError`](crate::error::TokenAlreadyExistsError)
pub fn builder() -> crate::error::token_already_exists_error::Builder {
crate::error::token_already_exists_error::Builder::default()
}
}
/// <p>The template contains resources with capabilities that weren't specified in the
/// Capabilities parameter.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct InsufficientCapabilitiesError {
pub message: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for InsufficientCapabilitiesError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("InsufficientCapabilitiesError");
formatter.field("message", &self.message);
formatter.finish()
}
}
impl InsufficientCapabilitiesError {
pub fn message(&self) -> Option<&str> {
self.message.as_deref()
}
}
impl std::fmt::Display for InsufficientCapabilitiesError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(
f,
"InsufficientCapabilitiesError [InsufficientCapabilitiesException]"
)?;
if let Some(inner_8) = &self.message {
write!(f, ": {}", inner_8)?;
}
Ok(())
}
}
impl std::error::Error for InsufficientCapabilitiesError {}
/// See [`InsufficientCapabilitiesError`](crate::error::InsufficientCapabilitiesError)
pub mod insufficient_capabilities_error {
/// A builder for [`InsufficientCapabilitiesError`](crate::error::InsufficientCapabilitiesError)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) message: std::option::Option<std::string::String>,
}
impl Builder {
pub fn message(mut self, input: impl Into<std::string::String>) -> Self {
self.message = Some(input.into());
self
}
pub fn set_message(mut self, input: std::option::Option<std::string::String>) -> Self {
self.message = input;
self
}
/// Consumes the builder and constructs a [`InsufficientCapabilitiesError`](crate::error::InsufficientCapabilitiesError)
pub fn build(self) -> crate::error::InsufficientCapabilitiesError {
crate::error::InsufficientCapabilitiesError {
message: self.message,
}
}
}
}
impl InsufficientCapabilitiesError {
/// Creates a new builder-style object to manufacture [`InsufficientCapabilitiesError`](crate::error::InsufficientCapabilitiesError)
pub fn builder() -> crate::error::insufficient_capabilities_error::Builder {
crate::error::insufficient_capabilities_error::Builder::default()
}
}
/// <p>The specified ID refers to an operation that doesn't exist.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct OperationNotFoundError {
pub message: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for OperationNotFoundError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("OperationNotFoundError");
formatter.field("message", &self.message);
formatter.finish()
}
}
impl OperationNotFoundError {
pub fn message(&self) -> Option<&str> {
self.message.as_deref()
}
}
impl std::fmt::Display for OperationNotFoundError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "OperationNotFoundError [OperationNotFoundException]")?;
if let Some(inner_9) = &self.message {
write!(f, ": {}", inner_9)?;
}
Ok(())
}
}
impl std::error::Error for OperationNotFoundError {}
/// See [`OperationNotFoundError`](crate::error::OperationNotFoundError)
pub mod operation_not_found_error {
/// A builder for [`OperationNotFoundError`](crate::error::OperationNotFoundError)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) message: std::option::Option<std::string::String>,
}
impl Builder {
pub fn message(mut self, input: impl Into<std::string::String>) -> Self {
self.message = Some(input.into());
self
}
pub fn set_message(mut self, input: std::option::Option<std::string::String>) -> Self {
self.message = input;
self
}
/// Consumes the builder and constructs a [`OperationNotFoundError`](crate::error::OperationNotFoundError)
pub fn build(self) -> crate::error::OperationNotFoundError {
crate::error::OperationNotFoundError {
message: self.message,
}
}
}
}
impl OperationNotFoundError {
/// Creates a new builder-style object to manufacture [`OperationNotFoundError`](crate::error::OperationNotFoundError)
pub fn builder() -> crate::error::operation_not_found_error::Builder {
crate::error::operation_not_found_error::Builder::default()
}
}
/// <p>The specified type does not exist in the CloudFormation registry.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct TypeNotFoundError {
pub message: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for TypeNotFoundError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("TypeNotFoundError");
formatter.field("message", &self.message);
formatter.finish()
}
}
impl TypeNotFoundError {
pub fn message(&self) -> Option<&str> {
self.message.as_deref()
}
}
impl std::fmt::Display for TypeNotFoundError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "TypeNotFoundError [TypeNotFoundException]")?;
if let Some(inner_10) = &self.message {
write!(f, ": {}", inner_10)?;
}
Ok(())
}
}
impl std::error::Error for TypeNotFoundError {}
/// See [`TypeNotFoundError`](crate::error::TypeNotFoundError)
pub mod type_not_found_error {
/// A builder for [`TypeNotFoundError`](crate::error::TypeNotFoundError)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) message: std::option::Option<std::string::String>,
}
impl Builder {
pub fn message(mut self, input: impl Into<std::string::String>) -> Self {
self.message = Some(input.into());
self
}
pub fn set_message(mut self, input: std::option::Option<std::string::String>) -> Self {
self.message = input;
self
}
/// Consumes the builder and constructs a [`TypeNotFoundError`](crate::error::TypeNotFoundError)
pub fn build(self) -> crate::error::TypeNotFoundError {
crate::error::TypeNotFoundError {
message: self.message,
}
}
}
}
impl TypeNotFoundError {
/// Creates a new builder-style object to manufacture [`TypeNotFoundError`](crate::error::TypeNotFoundError)
pub fn builder() -> crate::error::type_not_found_error::Builder {
crate::error::type_not_found_error::Builder::default()
}
}
/// <p>An error occurred during a CloudFormation registry operation.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct CFNRegistryError {
pub message: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for CFNRegistryError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("CFNRegistryError");
formatter.field("message", &self.message);
formatter.finish()
}
}
impl CFNRegistryError {
pub fn message(&self) -> Option<&str> {
self.message.as_deref()
}
}
impl std::fmt::Display for CFNRegistryError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "CFNRegistryError [CFNRegistryException]")?;
if let Some(inner_11) = &self.message {
write!(f, ": {}", inner_11)?;
}
Ok(())
}
}
impl std::error::Error for CFNRegistryError {}
/// See [`CFNRegistryError`](crate::error::CFNRegistryError)
pub mod cfn_registry_error {
/// A builder for [`CFNRegistryError`](crate::error::CFNRegistryError)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) message: std::option::Option<std::string::String>,
}
impl Builder {
pub fn message(mut self, input: impl Into<std::string::String>) -> Self {
self.message = Some(input.into());
self
}
pub fn set_message(mut self, input: std::option::Option<std::string::String>) -> Self {
self.message = input;
self
}
/// Consumes the builder and constructs a [`CFNRegistryError`](crate::error::CFNRegistryError)
pub fn build(self) -> crate::error::CFNRegistryError {
crate::error::CFNRegistryError {
message: self.message,
}
}
}
}
impl CFNRegistryError {
/// Creates a new builder-style object to manufacture [`CFNRegistryError`](crate::error::CFNRegistryError)
pub fn builder() -> crate::error::cfn_registry_error::Builder {
crate::error::cfn_registry_error::Builder::default()
}
}
/// <p>Error reserved for use by the <a href="https://docs.aws.amazon.com/cloudformation-cli/latest/userguide/what-is-cloudformation-cli.html">CloudFormation CLI</a>. CloudFormation does not return this error to users.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct OperationStatusCheckFailedError {
pub message: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for OperationStatusCheckFailedError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("OperationStatusCheckFailedError");
formatter.field("message", &self.message);
formatter.finish()
}
}
impl OperationStatusCheckFailedError {
pub fn message(&self) -> Option<&str> {
self.message.as_deref()
}
}
impl std::fmt::Display for OperationStatusCheckFailedError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(
f,
"OperationStatusCheckFailedError [OperationStatusCheckFailedException]"
)?;
if let Some(inner_12) = &self.message {
write!(f, ": {}", inner_12)?;
}
Ok(())
}
}
impl std::error::Error for OperationStatusCheckFailedError {}
/// See [`OperationStatusCheckFailedError`](crate::error::OperationStatusCheckFailedError)
pub mod operation_status_check_failed_error {
/// A builder for [`OperationStatusCheckFailedError`](crate::error::OperationStatusCheckFailedError)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) message: std::option::Option<std::string::String>,
}
impl Builder {
pub fn message(mut self, input: impl Into<std::string::String>) -> Self {
self.message = Some(input.into());
self
}
pub fn set_message(mut self, input: std::option::Option<std::string::String>) -> Self {
self.message = input;
self
}
/// Consumes the builder and constructs a [`OperationStatusCheckFailedError`](crate::error::OperationStatusCheckFailedError)
pub fn build(self) -> crate::error::OperationStatusCheckFailedError {
crate::error::OperationStatusCheckFailedError {
message: self.message,
}
}
}
}
impl OperationStatusCheckFailedError {
/// Creates a new builder-style object to manufacture [`OperationStatusCheckFailedError`](crate::error::OperationStatusCheckFailedError)
pub fn builder() -> crate::error::operation_status_check_failed_error::Builder {
crate::error::operation_status_check_failed_error::Builder::default()
}
}
/// <p>Error reserved for use by the <a href="https://docs.aws.amazon.com/cloudformation-cli/latest/userguide/what-is-cloudformation-cli.html">CloudFormation CLI</a>. CloudFormation does not return this error to users.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct InvalidStateTransitionError {
pub message: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for InvalidStateTransitionError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("InvalidStateTransitionError");
formatter.field("message", &self.message);
formatter.finish()
}
}
impl InvalidStateTransitionError {
pub fn message(&self) -> Option<&str> {
self.message.as_deref()
}
}
impl std::fmt::Display for InvalidStateTransitionError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(
f,
"InvalidStateTransitionError [InvalidStateTransitionException]"
)?;
if let Some(inner_13) = &self.message {
write!(f, ": {}", inner_13)?;
}
Ok(())
}
}
impl std::error::Error for InvalidStateTransitionError {}
/// See [`InvalidStateTransitionError`](crate::error::InvalidStateTransitionError)
pub mod invalid_state_transition_error {
/// A builder for [`InvalidStateTransitionError`](crate::error::InvalidStateTransitionError)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) message: std::option::Option<std::string::String>,
}
impl Builder {
pub fn message(mut self, input: impl Into<std::string::String>) -> Self {
self.message = Some(input.into());
self
}
pub fn set_message(mut self, input: std::option::Option<std::string::String>) -> Self {
self.message = input;
self
}
/// Consumes the builder and constructs a [`InvalidStateTransitionError`](crate::error::InvalidStateTransitionError)
pub fn build(self) -> crate::error::InvalidStateTransitionError {
crate::error::InvalidStateTransitionError {
message: self.message,
}
}
}
}
impl InvalidStateTransitionError {
/// Creates a new builder-style object to manufacture [`InvalidStateTransitionError`](crate::error::InvalidStateTransitionError)
pub fn builder() -> crate::error::invalid_state_transition_error::Builder {
crate::error::invalid_state_transition_error::Builder::default()
}
}
/// <p>The specified change set name or ID doesn't exist. To view valid change sets for a
/// stack, use the <code>ListChangeSets</code> action.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct ChangeSetNotFoundError {
pub message: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for ChangeSetNotFoundError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("ChangeSetNotFoundError");
formatter.field("message", &self.message);
formatter.finish()
}
}
impl ChangeSetNotFoundError {
pub fn message(&self) -> Option<&str> {
self.message.as_deref()
}
}
impl std::fmt::Display for ChangeSetNotFoundError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "ChangeSetNotFoundError [ChangeSetNotFoundException]")?;
if let Some(inner_14) = &self.message {
write!(f, ": {}", inner_14)?;
}
Ok(())
}
}
impl std::error::Error for ChangeSetNotFoundError {}
/// See [`ChangeSetNotFoundError`](crate::error::ChangeSetNotFoundError)
pub mod change_set_not_found_error {
/// A builder for [`ChangeSetNotFoundError`](crate::error::ChangeSetNotFoundError)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) message: std::option::Option<std::string::String>,
}
impl Builder {
pub fn message(mut self, input: impl Into<std::string::String>) -> Self {
self.message = Some(input.into());
self
}
pub fn set_message(mut self, input: std::option::Option<std::string::String>) -> Self {
self.message = input;
self
}
/// Consumes the builder and constructs a [`ChangeSetNotFoundError`](crate::error::ChangeSetNotFoundError)
pub fn build(self) -> crate::error::ChangeSetNotFoundError {
crate::error::ChangeSetNotFoundError {
message: self.message,
}
}
}
}
impl ChangeSetNotFoundError {
/// Creates a new builder-style object to manufacture [`ChangeSetNotFoundError`](crate::error::ChangeSetNotFoundError)
pub fn builder() -> crate::error::change_set_not_found_error::Builder {
crate::error::change_set_not_found_error::Builder::default()
}
}
/// <p>The specified change set can't be used to update the stack. For example, the change
/// set status might be <code>CREATE_IN_PROGRESS</code>, or the stack status might be
/// <code>UPDATE_IN_PROGRESS</code>.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct InvalidChangeSetStatusError {
pub message: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for InvalidChangeSetStatusError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("InvalidChangeSetStatusError");
formatter.field("message", &self.message);
formatter.finish()
}
}
impl InvalidChangeSetStatusError {
pub fn message(&self) -> Option<&str> {
self.message.as_deref()
}
}
impl std::fmt::Display for InvalidChangeSetStatusError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(
f,
"InvalidChangeSetStatusError [InvalidChangeSetStatusException]"
)?;
if let Some(inner_15) = &self.message {
write!(f, ": {}", inner_15)?;
}
Ok(())
}
}
impl std::error::Error for InvalidChangeSetStatusError {}
/// See [`InvalidChangeSetStatusError`](crate::error::InvalidChangeSetStatusError)
pub mod invalid_change_set_status_error {
/// A builder for [`InvalidChangeSetStatusError`](crate::error::InvalidChangeSetStatusError)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) message: std::option::Option<std::string::String>,
}
impl Builder {
pub fn message(mut self, input: impl Into<std::string::String>) -> Self {
self.message = Some(input.into());
self
}
pub fn set_message(mut self, input: std::option::Option<std::string::String>) -> Self {
self.message = input;
self
}
/// Consumes the builder and constructs a [`InvalidChangeSetStatusError`](crate::error::InvalidChangeSetStatusError)
pub fn build(self) -> crate::error::InvalidChangeSetStatusError {
crate::error::InvalidChangeSetStatusError {
message: self.message,
}
}
}
}
impl InvalidChangeSetStatusError {
/// Creates a new builder-style object to manufacture [`InvalidChangeSetStatusError`](crate::error::InvalidChangeSetStatusError)
pub fn builder() -> crate::error::invalid_change_set_status_error::Builder {
crate::error::invalid_change_set_status_error::Builder::default()
}
}
/// <p>You can't yet delete this stack set, because it still contains one or more stack
/// instances. Delete all stack instances from the stack set before deleting the stack
/// set.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct StackSetNotEmptyError {
pub message: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for StackSetNotEmptyError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("StackSetNotEmptyError");
formatter.field("message", &self.message);
formatter.finish()
}
}
impl StackSetNotEmptyError {
pub fn message(&self) -> Option<&str> {
self.message.as_deref()
}
}
impl std::fmt::Display for StackSetNotEmptyError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "StackSetNotEmptyError [StackSetNotEmptyException]")?;
if let Some(inner_16) = &self.message {
write!(f, ": {}", inner_16)?;
}
Ok(())
}
}
impl std::error::Error for StackSetNotEmptyError {}
/// See [`StackSetNotEmptyError`](crate::error::StackSetNotEmptyError)
pub mod stack_set_not_empty_error {
/// A builder for [`StackSetNotEmptyError`](crate::error::StackSetNotEmptyError)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) message: std::option::Option<std::string::String>,
}
impl Builder {
pub fn message(mut self, input: impl Into<std::string::String>) -> Self {
self.message = Some(input.into());
self
}
pub fn set_message(mut self, input: std::option::Option<std::string::String>) -> Self {
self.message = input;
self
}
/// Consumes the builder and constructs a [`StackSetNotEmptyError`](crate::error::StackSetNotEmptyError)
pub fn build(self) -> crate::error::StackSetNotEmptyError {
crate::error::StackSetNotEmptyError {
message: self.message,
}
}
}
}
impl StackSetNotEmptyError {
/// Creates a new builder-style object to manufacture [`StackSetNotEmptyError`](crate::error::StackSetNotEmptyError)
pub fn builder() -> crate::error::stack_set_not_empty_error::Builder {
crate::error::stack_set_not_empty_error::Builder::default()
}
}
/// <p>The specified name is already in use.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct NameAlreadyExistsError {
pub message: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for NameAlreadyExistsError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("NameAlreadyExistsError");
formatter.field("message", &self.message);
formatter.finish()
}
}
impl NameAlreadyExistsError {
pub fn message(&self) -> Option<&str> {
self.message.as_deref()
}
}
impl std::fmt::Display for NameAlreadyExistsError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "NameAlreadyExistsError [NameAlreadyExistsException]")?;
if let Some(inner_17) = &self.message {
write!(f, ": {}", inner_17)?;
}
Ok(())
}
}
impl std::error::Error for NameAlreadyExistsError {}
/// See [`NameAlreadyExistsError`](crate::error::NameAlreadyExistsError)
pub mod name_already_exists_error {
/// A builder for [`NameAlreadyExistsError`](crate::error::NameAlreadyExistsError)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) message: std::option::Option<std::string::String>,
}
impl Builder {
pub fn message(mut self, input: impl Into<std::string::String>) -> Self {
self.message = Some(input.into());
self
}
pub fn set_message(mut self, input: std::option::Option<std::string::String>) -> Self {
self.message = input;
self
}
/// Consumes the builder and constructs a [`NameAlreadyExistsError`](crate::error::NameAlreadyExistsError)
pub fn build(self) -> crate::error::NameAlreadyExistsError {
crate::error::NameAlreadyExistsError {
message: self.message,
}
}
}
}
impl NameAlreadyExistsError {
/// Creates a new builder-style object to manufacture [`NameAlreadyExistsError`](crate::error::NameAlreadyExistsError)
pub fn builder() -> crate::error::name_already_exists_error::Builder {
crate::error::name_already_exists_error::Builder::default()
}
}
/// <p>The quota for the resource has already been reached.</p>
/// <p>For information on resource and stack limitations, see <a href="https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/cloudformation-limits.html">Limits</a> in
/// the <i>AWS CloudFormation User Guide</i>.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct LimitExceededError {
pub message: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for LimitExceededError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("LimitExceededError");
formatter.field("message", &self.message);
formatter.finish()
}
}
impl LimitExceededError {
pub fn message(&self) -> Option<&str> {
self.message.as_deref()
}
}
impl std::fmt::Display for LimitExceededError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "LimitExceededError [LimitExceededException]")?;
if let Some(inner_18) = &self.message {
write!(f, ": {}", inner_18)?;
}
Ok(())
}
}
impl std::error::Error for LimitExceededError {}
/// See [`LimitExceededError`](crate::error::LimitExceededError)
pub mod limit_exceeded_error {
/// A builder for [`LimitExceededError`](crate::error::LimitExceededError)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) message: std::option::Option<std::string::String>,
}
impl Builder {
pub fn message(mut self, input: impl Into<std::string::String>) -> Self {
self.message = Some(input.into());
self
}
pub fn set_message(mut self, input: std::option::Option<std::string::String>) -> Self {
self.message = input;
self
}
/// Consumes the builder and constructs a [`LimitExceededError`](crate::error::LimitExceededError)
pub fn build(self) -> crate::error::LimitExceededError {
crate::error::LimitExceededError {
message: self.message,
}
}
}
}
impl LimitExceededError {
/// Creates a new builder-style object to manufacture [`LimitExceededError`](crate::error::LimitExceededError)
pub fn builder() -> crate::error::limit_exceeded_error::Builder {
crate::error::limit_exceeded_error::Builder::default()
}
}
/// <p>The specified resource exists, but has been changed.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct CreatedButModifiedError {
pub message: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for CreatedButModifiedError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("CreatedButModifiedError");
formatter.field("message", &self.message);
formatter.finish()
}
}
impl CreatedButModifiedError {
pub fn message(&self) -> Option<&str> {
self.message.as_deref()
}
}
impl std::fmt::Display for CreatedButModifiedError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "CreatedButModifiedError [CreatedButModifiedException]")?;
if let Some(inner_19) = &self.message {
write!(f, ": {}", inner_19)?;
}
Ok(())
}
}
impl std::error::Error for CreatedButModifiedError {}
/// See [`CreatedButModifiedError`](crate::error::CreatedButModifiedError)
pub mod created_but_modified_error {
/// A builder for [`CreatedButModifiedError`](crate::error::CreatedButModifiedError)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) message: std::option::Option<std::string::String>,
}
impl Builder {
pub fn message(mut self, input: impl Into<std::string::String>) -> Self {
self.message = Some(input.into());
self
}
pub fn set_message(mut self, input: std::option::Option<std::string::String>) -> Self {
self.message = input;
self
}
/// Consumes the builder and constructs a [`CreatedButModifiedError`](crate::error::CreatedButModifiedError)
pub fn build(self) -> crate::error::CreatedButModifiedError {
crate::error::CreatedButModifiedError {
message: self.message,
}
}
}
}
impl CreatedButModifiedError {
/// Creates a new builder-style object to manufacture [`CreatedButModifiedError`](crate::error::CreatedButModifiedError)
pub fn builder() -> crate::error::created_but_modified_error::Builder {
crate::error::created_but_modified_error::Builder::default()
}
}
/// <p>The resource with the name requested already exists.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct AlreadyExistsError {
pub message: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for AlreadyExistsError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("AlreadyExistsError");
formatter.field("message", &self.message);
formatter.finish()
}
}
impl AlreadyExistsError {
pub fn message(&self) -> Option<&str> {
self.message.as_deref()
}
}
impl std::fmt::Display for AlreadyExistsError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "AlreadyExistsError [AlreadyExistsException]")?;
if let Some(inner_20) = &self.message {
write!(f, ": {}", inner_20)?;
}
Ok(())
}
}
impl std::error::Error for AlreadyExistsError {}
/// See [`AlreadyExistsError`](crate::error::AlreadyExistsError)
pub mod already_exists_error {
/// A builder for [`AlreadyExistsError`](crate::error::AlreadyExistsError)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) message: std::option::Option<std::string::String>,
}
impl Builder {
pub fn message(mut self, input: impl Into<std::string::String>) -> Self {
self.message = Some(input.into());
self
}
pub fn set_message(mut self, input: std::option::Option<std::string::String>) -> Self {
self.message = input;
self
}
/// Consumes the builder and constructs a [`AlreadyExistsError`](crate::error::AlreadyExistsError)
pub fn build(self) -> crate::error::AlreadyExistsError {
crate::error::AlreadyExistsError {
message: self.message,
}
}
}
}
impl AlreadyExistsError {
/// Creates a new builder-style object to manufacture [`AlreadyExistsError`](crate::error::AlreadyExistsError)
pub fn builder() -> crate::error::already_exists_error::Builder {
crate::error::already_exists_error::Builder::default()
}
}
| {
matches!(
&self.kind,
DeleteStackInstancesErrorKind::OperationInProgressError(_)
)
} |
root.go | package cmd
import (
"fmt"
"os"
"github.com/evankanderson/knuts/pkg"
"github.com/spf13/cobra"
)
var rootCmd = &cobra.Command{
Use: "knuts",
Short: "Knuts is an install and management utility for Knative",
Version: "0.1",
}
func init() {
rootCmd.PersistentFlags().BoolVar(&pkg.DryRun, "dry_run", true, "When true, print operations rather than executing them.")
// rootCmd.PersistentFlags().StringVar(&pkg.GCPProject, "gcp_project", "", "GCP Project to use for GCP operations")
}
// Execute is the root Cobra command.
func Execute() {
if err := rootCmd.Execute(); err != nil {
fmt.Println(err)
os.Exit(1)
} | } |
|
validate-binary-search-tree.go | package _098_validate_binary_search_tree
import "github.com/yigenshutiao/Golang-algorithm-template/util"
type TreeNode = util.TreeNode
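// isValidBST checks the BST property by recursively narrowing an open
// (min, max) interval on each descent; the sentinel bounds below sit well
// outside the expected node-value range.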
func isValidBST(root *TreeNode) bool {
min, max := -9999999999999, 9999999999999
return traverse(root, min, max) | }
func traverse(root *TreeNode, min, max int) bool {
if root == nil {
return true
}
return root.Val < max && root.Val > min && traverse(root.Left, min, root.Val) && traverse(root.Right, root.Val, max)
} | |
jwt_test.go | package jwt_test
import (
"net/http"
"net/http/httptest"
"strings"
"testing"
"github.com/simonhylander/gorsk/pkg/utl/middleware/jwt"
"github.com/simonhylander/gorsk/pkg/utl/model"
"github.com/simonhylander/gorsk/pkg/utl/mock"
"github.com/labstack/echo"
"github.com/stretchr/testify/assert"
)
func echoHandler(mw ...echo.MiddlewareFunc) *echo.Echo {
e := echo.New()
for _, v := range mw {
e.Use(v)
}
e.GET("/hello", hwHandler)
return e
}
func hwHandler(c echo.Context) error {
return c.String(200, "Hello World")
}
func TestMWFunc(t *testing.T) {
cases := []struct {
name string
wantStatus int
header string
signMethod string
}{
{
name: "Empty header",
wantStatus: http.StatusUnauthorized,
},
{
name: "Header not containing Bearer",
header: "notBearer",
wantStatus: http.StatusUnauthorized,
},
{
name: "Invalid header",
header: mock.HeaderInvalid(),
wantStatus: http.StatusUnauthorized,
},
{
name: "Success",
header: mock.HeaderValid(),
wantStatus: http.StatusOK,
},
}
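// jwt.New takes the signing secret, the signing algorithm, and the token duration (60 here; the unit is presumably minutes).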
jwtMW := jwt.New("jwtsecret", "HS256", 60)
ts := httptest.NewServer(echoHandler(jwtMW.MWFunc()))
defer ts.Close()
path := ts.URL + "/hello"
client := &http.Client{}
for _, tt := range cases {
t.Run(tt.name, func(t *testing.T) {
req, _ := http.NewRequest("GET", path, nil)
req.Header.Set("Authorization", tt.header)
res, err := client.Do(req)
if err != nil {
t.Fatal("Cannot create http request")
}
assert.Equal(t, tt.wantStatus, res.StatusCode)
})
}
}
func | (t *testing.T) {
cases := []struct {
name string
wantToken string
algo string
req *gorsk.User
}{
{
name: "Invalid algo",
algo: "invalid",
},
{
name: "Success",
algo: "HS256",
req: &gorsk.User{
Base: gorsk.Base{
ID: 1,
},
Username: "johndoe",
Email: "[email protected]",
Role: &gorsk.Role{
AccessLevel: gorsk.SuperAdminRole,
},
CompanyID: 1,
LocationID: 1,
},
wantToken: "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9",
},
}
for _, tt := range cases {
t.Run(tt.name, func(t *testing.T) {
if tt.algo != "HS256" {
assert.Panics(t, func() {
jwt.New("jwtsecret", tt.algo, 60)
}, "The code did not panic")
return
}
jwt := jwt.New("jwtsecret", tt.algo, 60)
str, _, err := jwt.GenerateToken(tt.req)
assert.Nil(t, err)
assert.Equal(t, tt.wantToken, strings.Split(str, ".")[0])
})
}
}
| TestGenerateToken |
queries_trees.go | package api
import (
"bytes"
"fmt"
"strings"
)
type Trees struct {
Sha string `json:"sha"`
Url string `json:"url"`
Tree []Node `json:"tree"`
Truncated bool `json:"truncated"`
}
type Node struct {
Path string `json:"path"`
Mode string `json:"mode"`
Type string `json:"type"`
Size int64 `json:"size"`
Sha string `json:"sha"`
Url string `json:"url"`
}
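// FilterPath returns the tree nodes whose path starts with the given prefix.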
func (tree Trees) FilterPath(path string) []Node {
newNodes := make([]Node, 0)
for _, node := range tree.Tree {
if strings.HasPrefix(node.Path, path) {
newNodes = append(newNodes, node)
}
}
return newNodes
}
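// GetTrees fetches the git tree object for treeSha (defaulting to HEAD), optionally recursing into subtrees.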
func GetTrees(client *Client, owner string, repo string, treeSha string, recursive bool) (*Trees, error) | {
if treeSha == "" {
treeSha = "HEAD"
}
path := fmt.Sprintf("repos/%s/%s/git/trees/%s", owner, repo, treeSha)
if recursive {
path += "?recursive=1"
}
var trees Trees
r := bytes.NewReader([]byte(`{}`))
err := client.REST("https://api.github.com/", "GET", path, r, &trees)
if err != nil {
return nil, err
}
return &trees, nil
} |
|
SubMerchantParams.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import simplejson as json
from alipay.aop.api.constant.ParamConstants import *
class SubMerchantParams(object):
def __init__(self):
self._sub_merchant_id = None
self._sub_merchant_name = None
self._sub_merchant_service_description = None
self._sub_merchant_service_name = None
@property
def sub_merchant_id(self):
return self._sub_merchant_id
@sub_merchant_id.setter
def sub_merchant_id(self, value):
self._sub_merchant_id = value
@property
def sub_merchant_name(self):
return self._sub_merchant_name
@sub_merchant_name.setter
def sub_merchant_name(self, value):
self._sub_merchant_name = value
@property
def sub_merchant_service_description(self):
return self._sub_merchant_service_description
@sub_merchant_service_description.setter
def sub_merchant_service_description(self, value):
self._sub_merchant_service_description = value
@property
def | (self):
return self._sub_merchant_service_name
@sub_merchant_service_name.setter
def sub_merchant_service_name(self, value):
self._sub_merchant_service_name = value
def to_alipay_dict(self):
params = dict()
if self.sub_merchant_id:
if hasattr(self.sub_merchant_id, 'to_alipay_dict'):
params['sub_merchant_id'] = self.sub_merchant_id.to_alipay_dict()
else:
params['sub_merchant_id'] = self.sub_merchant_id
if self.sub_merchant_name:
if hasattr(self.sub_merchant_name, 'to_alipay_dict'):
params['sub_merchant_name'] = self.sub_merchant_name.to_alipay_dict()
else:
params['sub_merchant_name'] = self.sub_merchant_name
if self.sub_merchant_service_description:
if hasattr(self.sub_merchant_service_description, 'to_alipay_dict'):
params['sub_merchant_service_description'] = self.sub_merchant_service_description.to_alipay_dict()
else:
params['sub_merchant_service_description'] = self.sub_merchant_service_description
if self.sub_merchant_service_name:
if hasattr(self.sub_merchant_service_name, 'to_alipay_dict'):
params['sub_merchant_service_name'] = self.sub_merchant_service_name.to_alipay_dict()
else:
params['sub_merchant_service_name'] = self.sub_merchant_service_name
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = SubMerchantParams()
if 'sub_merchant_id' in d:
o.sub_merchant_id = d['sub_merchant_id']
if 'sub_merchant_name' in d:
o.sub_merchant_name = d['sub_merchant_name']
if 'sub_merchant_service_description' in d:
o.sub_merchant_service_description = d['sub_merchant_service_description']
if 'sub_merchant_service_name' in d:
o.sub_merchant_service_name = d['sub_merchant_service_name']
return o
| sub_merchant_service_name |
auth.role.has.directive.ts | import { Maybe, AuthRole, ArrayOrValue } from '@dereekb/util';
import { BehaviorSubject } from 'rxjs';
import { Directive, Input, TemplateRef, ViewContainerRef, OnDestroy } from '@angular/core';
import { authRolesSetContainsAllRolesFrom, DbxAuthService } from './service';
import { AbstractIfDirective } from '../view/if.directive';
/**
* Structural decorator directive similar to ngIf that embeds content if the current auth user has all of the target role(s).
*/
@Directive({
selector: '[dbxAuthHasRoles]'
})
export class | extends AbstractIfDirective implements OnDestroy {
private _targetRoles = new BehaviorSubject<Maybe<ArrayOrValue<AuthRole>>>(undefined);
readonly targetRoles$ = this._targetRoles.asObservable();
readonly show$ = this.dbxAuthService.authRoles$.pipe(authRolesSetContainsAllRolesFrom(this.targetRoles$));
constructor(templateRef: TemplateRef<unknown>, viewContainer: ViewContainerRef, private dbxAuthService: DbxAuthService) {
super(templateRef, viewContainer);
}
override ngOnDestroy(): void {
this._targetRoles.complete();
}
@Input('dbxAuthHasRoles')
set targetRoles(roles: Maybe<ArrayOrValue<AuthRole>>) {
this._targetRoles.next(roles);
}
}
| DbxAuthHasRolesDirective |
mod.rs | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/// Backtrace support built on libgcc with some extra OS-specific support
///
/// Some methods of getting a backtrace:
///
/// * The backtrace() functions on unix. It turns out this doesn't work very
/// well for green threads on OSX, and the address to symbol portion of it
/// suffers problems that are described below.
///
/// * Using libunwind. This is more difficult than it sounds because libunwind
/// isn't installed everywhere by default. It's also a bit of a hefty library,
/// so possibly not the best option. When testing, libunwind was excellent at
/// getting both accurate backtraces and accurate symbols across platforms.
/// This route was not chosen in favor of the next option, however.
///
/// * We're already using libgcc_s for exceptions in rust (triggering thread
/// unwinding and running destructors on the stack), and it turns out that it
/// conveniently comes with a function that also gives us a backtrace. All of
/// these functions look like _Unwind_*, but it's not quite the full
/// repertoire of the libunwind API. Due to it already being in use, this was
/// the chosen route of getting a backtrace.
///
/// After choosing libgcc_s for backtraces, the sad part is that it will only
/// give us a stack trace of instruction pointers. Thankfully these instruction
/// pointers are accurate (they work for green and native threads), but it's
/// then up to us again to figure out how to translate these addresses to
/// symbols. As with before, we have a few options. Before, that, a little bit
/// of an interlude about symbols. This is my very limited knowledge about
/// symbol tables, and this information is likely slightly wrong, but the
/// general idea should be correct.
///
/// When talking about symbols, it's helpful to know a few things about where
/// symbols are located. Some symbols are located in the dynamic symbol table
/// of the executable which in theory means that they're available for dynamic
/// linking and lookup. Other symbols end up only in the local symbol table of
/// the file. This loosely corresponds to pub and priv functions in Rust.
///
/// Armed with this knowledge, we know that our solution for address to symbol
/// translation will need to consult both the local and dynamic symbol tables.
/// With that in mind, here's our options of translating an address to
/// a symbol.
///
/// * Use dladdr(). The original backtrace()-based idea actually uses dladdr()
/// behind the scenes to translate, and this is why backtrace() was not used.
/// Conveniently, this method works fantastically on OSX. It appears dladdr()
/// uses magic to consult the local symbol table, or we're putting everything
/// in the dynamic symbol table anyway. Regardless, for OSX, this is the
/// method used for translation. It's provided by the system and easy to do.
///
/// Sadly, all other systems have a dladdr() implementation that does not
/// consult the local symbol table. This means that most functions are blank
/// because they don't have symbols. This means that we need another solution.
///
/// * Use unw_get_proc_name(). This is part of the libunwind api (not the
/// libgcc_s version of the libunwind api), but involves taking a dependency
/// to libunwind. We may pursue this route in the future if we bundle
/// libunwind, but libunwind was unwieldy enough that it was not chosen at
/// this time to provide this functionality.
///
/// * Shell out to a utility like `readelf`. Crazy though it may sound, it's a
/// semi-reasonable solution. The stdlib already knows how to spawn processes,
/// so in theory it could invoke readelf, parse the output, and consult the
/// local/dynamic symbol tables from there. This ended up not getting chosen
/// due to the craziness of the idea plus the advent of the next option.
///
/// * Use `libbacktrace`. It turns out that this is a small library bundled in
/// the gcc repository which provides backtrace and symbol translation
/// functionality. All we really need from it is the backtrace functionality,
/// and we only really need this on everything that's not OSX, so this is the
/// chosen route for now.
///
/// In summary, the current situation uses libgcc_s to get a trace of stack
/// pointers, and we use dladdr() or libbacktrace to translate these addresses
/// to symbols. This is a bit of a hokey implementation as-is, but it works for
/// all unix platforms we support right now, so it at least gets the job done.
pub use self::tracing::write;
use io;
use io::prelude::*;
use libc;
use str;
use sys_common::backtrace::{demangle, HEX_WIDTH};
// tracing impls:
mod tracing;
// symbol resolvers:
mod printing;
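/// Writes a single numbered backtrace frame: the index, the frame address,
/// and the demangled symbol (or "<unknown>" when no symbol is available).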
pub fn output(w: &mut Write, idx: isize, addr: *mut libc::c_void,
s: Option<&[u8]>) -> io::Result<()> {
try!(write!(w, " {:2}: {:2$?} - ", idx, addr, HEX_WIDTH));
match s.and_then(|s| str::from_utf8(s).ok()) {
Some(string) => try!(demangle(w, string)),
None => try!(write!(w, "<unknown>")),
}
w.write_all(&['\n' as u8])
}
#[allow(dead_code)]
pub fn | (w: &mut Write, file: &[u8], line: libc::c_int,
more: bool) -> io::Result<()> {
let file = str::from_utf8(file).unwrap_or("<unknown>");
// prior line: " ##: {:2$} - func"
try!(write!(w, " {:3$}at {}:{}", "", file, line, HEX_WIDTH));
if more {
try!(write!(w, " <... and possibly more>"));
}
w.write_all(&['\n' as u8])
}
| output_fileline |
ws.py | import os
import json
import time
import asyncio
import collections
from aiohttp import web, WSMsgType
from typing import List, Dict, Any, Callable
from .utils import resource_conditions, TTLQueue
from services.utils import logging
from pyee import AsyncIOEventEmitter
from ..data.refiner import TaskRefiner, ArtifactRefiner
from throttler import throttle_simultaneous
WS_QUEUE_TTL_SECONDS = int(os.environ.get("WS_QUEUE_TTL_SECONDS", 60 * 5))  # 5 minute TTL by default
WS_POSTPROCESS_CONCURRENCY_LIMIT = int(os.environ.get("WS_POSTPROCESS_CONCURRENCY_LIMIT", 8))
SUBSCRIBE = 'SUBSCRIBE'
UNSUBSCRIBE = 'UNSUBSCRIBE'
WSSubscription = collections.namedtuple(
"WSSubscription", "ws disconnected_ts fullpath resource query uuid filter")
class Websocket(object):
'''
Adds a '/ws' endpoint and support for broadcasting realtime resource events to subscribed frontend clients.
Subscribe to runs created by user dipper:
/runs?_tags=user:dipper
'uuid' can be used to identify a specific subscription.
Subscribe to future events:
{"type": "SUBSCRIBE", "uuid": "myst3rySh4ck", "resource": "/runs"}
Subscribe to future events and also return past data since a unix timestamp (seconds):
{"type": "SUBSCRIBE", "uuid": "myst3rySh4ck", "resource": "/runs", "since": 1602752197}
Unsubscribe:
{"type": "UNSUBSCRIBE", "uuid": "myst3rySh4ck"}
Example event:
{"type": "UPDATE", "uuid": "myst3rySh4ck", "resource": "/runs", "data": {"foo": "bar"}}
'''
subscriptions: List[WSSubscription] = []
def __init__(self, app, db, event_emitter=None, queue_ttl: int = WS_QUEUE_TTL_SECONDS, cache=None):
self.event_emitter = event_emitter or AsyncIOEventEmitter()
self.db = db
self.queue = TTLQueue(queue_ttl)
self.task_refiner = TaskRefiner(cache=cache.artifact_cache) if cache else None
self.artifact_refiner = ArtifactRefiner(cache=cache.artifact_cache) if cache else None
self.logger = logging.getLogger("Websocket")
event_emitter.on('notify', self.event_handler)
app.router.add_route('GET', '/ws', self.websocket_handler)
self.loop = asyncio.get_event_loop()
async def event_handler(self, operation: str, resources: List[str], data: Dict, table_name: str = None, filter_dict: Dict = {}):
"""
Event handler for websocket events on 'notify'.
Either receives raw data from the table triggers listener and performs a database load
from the provided table before broadcasting, or receives predefined data and broadcasts it as-is.
Parameters
----------
operation : str
name of the operation related to the DB event, either 'INSERT' or 'UPDATE'
resources : List[str]
List of resource paths that this event is related to. Used strictly for broadcasting to
websocket subscriptions
data : Dict
The data of the record to be broadcast. Can either be complete, or partial.
In case of partial data (and a provided table name) this is only used for the DB query.
table_name : str (optional)
name of the table that the complete data should be queried from.
filter_dict : Dict (optional)
a dictionary of filters used in the query when fetching complete data.
"""
# Check if event needs to be broadcast (if anyone is subscribed to the resource)
if any(subscription.resource in resources for subscription in self.subscriptions):
# load the data and postprocessor for broadcasting if table
# is provided (otherwise data has already been loaded in advance)
if table_name:
table = self.db.get_table_by_name(table_name)
_postprocess = await self.get_table_postprocessor(table_name)
_data = await load_data_from_db(table, data, filter_dict, postprocess=_postprocess)
else:
_data = data
if not _data:
# Skip sending this event to subscriptions in case data is None or empty.
# This could be caused by insufficient/broken data and can break the UI.
return
# Append event to the queue so that we can later dispatch them in case of disconnections
#
# NOTE: server instance specific ws queue will not work when scaling across multiple instances.
# but on the other hand loading data and pushing everything into the queue for every server instance is also
# a suboptimal solution.
await self.queue.append({
'operation': operation,
'resources': resources,
'data': _data
})
for subscription in self.subscriptions:
try:
if subscription.disconnected_ts and time.time() - subscription.disconnected_ts > WS_QUEUE_TTL_SECONDS:
await self.unsubscribe_from(subscription.ws, subscription.uuid)
else:
await self._event_subscription(subscription, operation, resources, _data)
except ConnectionResetError:
self.logger.debug("Trying to broadcast to a stale subscription. Unsubscribing")
await self.unsubscribe_from(subscription.ws, subscription.uuid)
except Exception:
self.logger.exception("Broadcasting to subscription failed")
async def _event_subscription(self, subscription: WSSubscription, operation: str, resources: List[str], data: Dict):
for resource in resources:
if subscription.resource == resource:
# Check if possible filters match this event
# only if the subscription actually provided conditions.
|
async def subscribe_to(self, ws, uuid: str, resource: str, since: int):
# Always unsubscribe existing duplicate identifiers
await self.unsubscribe_from(ws, uuid)
# Create new subscription
_resource, query, filter_fn = resource_conditions(resource)
subscription = WSSubscription(
ws=ws, fullpath=resource, resource=_resource, query=query, uuid=uuid,
filter=filter_fn, disconnected_ts=None)
self.subscriptions.append(subscription)
# Send previous events that client might have missed due to disconnection
if since:
# Replay queued events from the provided timestamp onwards
event_queue = await self.queue.values_since(since)
for _, event in event_queue:
self.loop.create_task(
self._event_subscription(subscription, event['operation'], event['resources'], event['data'])
)
async def unsubscribe_from(self, ws, uuid: str = None):
if uuid:
self.subscriptions = list(
filter(lambda s: uuid != s.uuid or ws != s.ws, self.subscriptions))
else:
self.subscriptions = list(
filter(lambda s: ws != s.ws, self.subscriptions))
async def handle_disconnect(self, ws):
"""
Sets disconnected timestamp on websocket subscription without removing it from the list.
Removing is handled by event_handler that checks for expired subscriptions before emitting
"""
self.subscriptions = list(
map(
lambda sub: sub._replace(disconnected_ts=time.time()) if sub.ws == ws else sub,
self.subscriptions)
)
async def websocket_handler(self, request):
"Handler for received messages from the open Web Socket connection."
# TODO: Consider using options autoping=True and heartbeat=20 if supported by clients.
ws = web.WebSocketResponse()
await ws.prepare(request)
while not ws.closed:
async for msg in ws:
if msg.type == WSMsgType.TEXT:
try:
# Custom ping message handling.
# If someone is pinging, lets answer with pong rightaway.
if msg.data == "__ping__":
await ws.send_str("__pong__")
else:
payload = json.loads(msg.data)
op_type = payload.get("type")
resource = payload.get("resource")
uuid = payload.get("uuid")
since = payload.get("since")
if since is not None and str(since).isnumeric():
since = int(since)
else:
since = None
if op_type == SUBSCRIBE and uuid and resource:
await self.subscribe_to(ws, uuid, resource, since)
elif op_type == UNSUBSCRIBE and uuid:
await self.unsubscribe_from(ws, uuid)
except Exception:
self.logger.exception("Exception occurred.")
# Always remove clients from listeners
await self.handle_disconnect(ws)
return ws
@throttle_simultaneous(count=WS_POSTPROCESS_CONCURRENCY_LIMIT)
async def get_table_postprocessor(self, table_name):
if table_name == self.db.task_table_postgres.table_name:
return self.task_refiner.postprocess
elif table_name == self.db.artifact_table_postgres.table_name:
return self.artifact_refiner.postprocess
else:
return None
async def load_data_from_db(table, data: Dict[str, Any],
filter_dict: Dict = {},
postprocess: Callable = None):
# filter the data for loading based on available primary keys
conditions_dict = {
key: data[key] for key in table.primary_keys
if key in data
}
filter_dict = {**conditions_dict, **filter_dict}
conditions, values = [], []
for k, v in filter_dict.items():
conditions.append("{} = %s".format(k))
values.append(v)
results, *_ = await table.find_records(
conditions=conditions, values=values, fetch_single=True,
enable_joins=True,
expanded=True,
postprocess=postprocess
)
return results.body
| if subscription.filter:
filters_match_request = subscription.filter(data)
else:
filters_match_request = True
if filters_match_request:
payload = {'type': operation, 'uuid': subscription.uuid,
'resource': resource, 'data': data}
await subscription.ws.send_str(json.dumps(payload)) |
csvjoin.go | package main
import (
"encoding/csv"
"fmt"
"io"
"log"
"os"
"sort"
"strings"
)
var (
writer *csv.Writer
)
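// main reads all input CSVs, joins their rows on the columns common to
// every file, and writes the joined rows to stdout.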
func | () {
fileNames := GetFileNames()
readers := OpenReaders(fileNames)
allHeaders := GatherAllHeaders(readers, fileNames)
joinColumns := IdentifyJoinColumns(allHeaders)
outputColumns := IdentifyOutputColumns(allHeaders)
allKeys, allData := ReadAllInputSources(readers, allHeaders, joinColumns)
writer = csv.NewWriter(os.Stdout)
err := writer.Write(outputColumns)
if err != nil {
log.Fatalf("failed to write CSV output: %v", err)
}
for _, key := range allKeys {
WriteCSVs(key, outputColumns, allData)
}
writer.Flush()
}
// WriteCSVs writes out the full join of records across all the data collections
// for a single key.
func WriteCSVs(key string, outputColumns []string, allData []DataCollection) {
prt := func(recs []Record) {
row := []string{}
for _, col := range outputColumns {
got := false
for _, rec := range recs {
v, ok := rec[col]
if ok {
row = append(row, v)
got = true
break
}
}
if !got {
row = append(row, "")
}
}
err := writer.Write(row)
if err != nil {
log.Fatalf("failed to write CSV output: %v", err)
}
}
recurse(key, []Record{}, allData, prt)
}
// Printer is a function that prints a record from a slice of Records.
type Printer func([]Record)
// recurse iterates recursively over all the combinations of Records for a
// particular key.
func recurse(key string, recs []Record, remain []DataCollection, prt Printer) {
if len(remain) == 0 {
prt(recs)
return
}
this := remain[0]
thisRecords := this.data[key]
if len(thisRecords) == 0 {
recurse(key, recs, remain[1:], prt)
return
}
for _, rec := range thisRecords {
recurse(key, append(recs, rec), remain[1:], prt)
}
}
// ReadAllInputSources reads all the readers, loading all data into
// DataCollections. Returns a list of distinct keys (across all inputs), and a
// list of all the DataCollections.
func ReadAllInputSources(readers []*csv.Reader, allHeaders [][]string, joinColumns []string) ([]string, []DataCollection) {
keyMap := map[string]bool{}
allData := []DataCollection{}
for i, r := range readers {
data := ReadData(r, allHeaders[i], joinColumns)
for k := range data.data {
keyMap[k] = true
}
allData = append(allData, data)
}
keys := []string{}
for k := range keyMap {
keys = append(keys, k)
}
sort.Strings(keys)
return keys, allData
}
// ReadData reads a CSV input source collecting all the input into a DataCollection.
func ReadData(reader *csv.Reader, headers []string, joinColumns []string) DataCollection {
recordOf := func(row []string) Record {
r := Record{}
for i, v := range row {
n := headers[i]
r[n] = v
}
return r
}
keyOf := func(rec Record) string {
sb := strings.Builder{}
for i, c := range joinColumns {
if i > 0 {
sb.WriteString("++")
}
sb.WriteString(rec[c])
}
return sb.String()
}
data := NewDataCollection()
for {
row, err := reader.Read()
if err == io.EOF {
break
}
if err != nil {
log.Fatalf("failed to read/parse CSV input: %v", err)
}
rec := recordOf(row)
key := keyOf(rec)
data.Add(key, rec)
}
return data
}
// GetFileNames gets the list of file names from command line arguments. If no
// files named, prints usage message and aborts program.
func GetFileNames() []string {
fileNames := os.Args[1:]
if len(fileNames) < 2 {
fmt.Fprintf(os.Stderr, "usage: %s f1.csv f2.csv ...\n", os.Args[0])
os.Exit(1)
}
return fileNames
}
// OpenReaders opens all the named files and creates a CSV reader for each input
// source.
func OpenReaders(fileNames []string) []*csv.Reader {
readers := []*csv.Reader{}
for _, fName := range fileNames {
r, err := os.Open(fName)
if err != nil {
log.Fatalf("cannot read CSV file %s: %v", fName, err)
}
readers = append(readers, csv.NewReader(r))
}
return readers
}
// GatherAllHeaders reads the first line of each CSV reader and returns the
// list of all header lists.
func GatherAllHeaders(readers []*csv.Reader, fileNames []string) [][]string {
allHeaders := [][]string{}
for i, r := range readers {
header, err := r.Read()
if err == io.EOF {
log.Fatalf("CSV file %s has no headers. cannot process.", fileNames[i])
}
allHeaders = append(allHeaders, header)
}
return allHeaders
}
// IdentifyJoinColumns looks over all the headers of all the inputs and
// identifies which columns are in all the input sources.
func IdentifyJoinColumns(allHeaders [][]string) []string {
headerCounts := map[string]int{}
for _, header := range allHeaders {
for _, col := range header {
headerCounts[col]++
}
}
joinColumns := []string{}
for col, count := range headerCounts {
if count == len(allHeaders) {
joinColumns = append(joinColumns, col)
}
}
if len(joinColumns) == 0 {
log.Fatalf("cannot identify columns common to all input files to join")
}
return joinColumns
}
// IdentifyOutputColumns returns the unique columns across all the input
// sources.
func IdentifyOutputColumns(allHeaders [][]string) []string {
outputFields := UniqueSlice{}
for _, header := range allHeaders {
for _, col := range header {
outputFields.Append(col)
}
}
return outputFields.GetSlice()
}
// DataCollection is a collection of records, mapped by key.
type DataCollection struct {
data map[string][]Record
}
// NewDataCollection sets up a new DataCollection
func NewDataCollection() DataCollection {
dc := DataCollection{}
dc.data = map[string][]Record{}
return dc
}
// Record is a set of data, mapped by column name.
type Record map[string]string
// Add appends another record to the data collection.
func (dc *DataCollection) Add(key string, rec Record) {
cur := dc.data[key]
dc.data[key] = append(cur, rec)
}
// UniqueSlice contains a slice of distinct strings.
type UniqueSlice struct {
slice []string
}
// Append adds the string to the slice, only if not already present.
func (u *UniqueSlice) Append(s string) {
for _, x := range u.slice {
if x == s {
return
}
}
u.slice = append(u.slice, s)
}
// GetSlice returns the slice containing the unique values.
func (u *UniqueSlice) GetSlice() []string {
return u.slice
}
| main |
Location.js | import React from 'react';
import { connect, useDispatch, useSelector } from 'react-redux';
import Window from '../../Window';
import Field from './Field';
import FriendlyUnit from './FriendlyUnit';
import EnemyUnit from './EnemyUnit';
import ReduxActions from '../../../js/redux/actions';
import WindowLocation from '../../../js/windows/window-location';
import '../../../stylesheets/window-location.css';
import Logger from '../../../js/utils/logger';
import {
initializeEnemyUnitsAction,
initializeFriendlyUnitsAction,
} from '../../../js/api/services/instance-service';
import useAfterPaintEffect from '../../../js/react/hooks/after-paint-effect';
function Location() {
const enemyUnits = useSelector((state) => state.enemyUnits);
const friendlyUnits = useSelector((state) => state.friendlyUnits);
const dispatch = useDispatch();
useAfterPaintEffect(async () => {
const updateEnemyUnits = (units) => {
dispatch(ReduxActions.updateEnemyUnitsAction(units));
};
const updateFriendlyUnits = (units) => {
dispatch(ReduxActions.updateFriendlyUnitsAction(units));
};
let response = await initializeEnemyUnitsAction();
updateEnemyUnits(response.data);
response = await initializeFriendlyUnitsAction();
updateFriendlyUnits(response.data);
if (window.debug) {
Logger.log('Location initialized...');
}
}, []);
useAfterPaintEffect(() => {
if (friendlyUnits[0]) {
const friendlyUnit = friendlyUnits[0];
WindowLocation.setEquipmentBackground(friendlyUnit.gender);
/**
* this should be moved from DOM to component as well (hiding/showing component breaks it)
*/
window.turmoil.instance.activeUnit = friendlyUnit.ident;
window.turmoil.instance.polygonsInRange = friendlyUnit.polygonsInRange;
WindowLocation.setActivePolygons();
}
});
const updateUnit = (unit) => {
if (unit.ident.includes('Enemy')) {
dispatch(ReduxActions.updateEnemyUnitsAction({ unitToUpdate: unit }));
} else {
dispatch(ReduxActions.updateFriendlyUnitsAction({ unitToUpdate: unit }));
}
};
const finalizeActionOnField = (data) => {
if (data && data.actionType === 'move') {
let unit = friendlyUnits.find(
(friendlyUnit) => friendlyUnit.ident === data.unitId,
);
if (!unit) {
unit = enemyUnits.find((enemyUnit) => enemyUnit.ident === data.unitId);
}
if (unit) {
unit.position = data.polygonId;
if (data.polygonsInRange) {
unit.polygonsInRange = data.polygonsInRange;
}
updateUnit(unit);
}
}
};
const background = {
backgroundImage: "url('/images/backgrounds/background_grunge_650x550.png')",
backgroundSize: 'cover',
width: '850px',
height: '780px',
};
/*
* TODO: this needs to come from back end at some point
*/
const fields = [];
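// Build a 10x8 grid of field descriptors (columns 1-10, rows 1-8).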
for (let i = 1; i < 11; i += 1) { | for (let j = 1; j < 9; j += 1) {
fields.push({ column: i, row: j });
}
}
return (
<Window ident="location" background={background}>
{friendlyUnits.map((unit) => (
<FriendlyUnit
ident={unit.ident}
portrait={unit.portrait}
healthBar={unit.healthBar}
position={unit.position}
key={unit.ident}
movement={unit.movementPoints}
/>
))}
{enemyUnits.map((unit) => (
<EnemyUnit
ident={unit.ident}
portrait={unit.portrait}
healthBar={unit.healthBar}
position={unit.position}
key={unit.ident}
movement={unit.movementPoints}
locationCallbackAction={finalizeActionOnField}
/>
))}
<div className="instanceSvg">
<svg
style={{
width: '160px',
height: '160px',
left: '320px',
top: '320px',
position: 'relative',
transform: 'scale(5)',
}}
id="svgElement"
>
<g>
{fields.map((field) => {
const fieldIdent = `polygon-${field.column}-${field.row}`;
let unit = friendlyUnits.find(
(friendlyUnit) => friendlyUnit.position === fieldIdent,
);
if (!unit) {
unit = enemyUnits.find(
(enemyUnit) => enemyUnit.position === fieldIdent,
);
}
return (
<Field
column={field.column}
row={field.row}
key={fieldIdent}
unit={unit}
locationCallbackAction={finalizeActionOnField}
/>
);
})}
</g>
</svg>
</div>
</Window>
);
}
export default connect()(Location); | |
heroes-routing.module.ts | import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
@NgModule({
imports: [ | CommonModule
],
declarations: []
})
export class HeroesRoutingModule { } | |
inputtextdemo.ts | import {Component, ViewEncapsulation} from '@angular/core';
@Component({
templateUrl: './inputtextdemo.html',
styleUrls: ['./inputtextdemo.scss']
})
export class | {
disabled: boolean = true;
value1: string;
value2: string;
value3: string;
value4: string;
value5: string = 'Disabled';
value6: string;
} | InputTextDemo |
hard_softmax_nac.py | import math
import torch
from ..abstract import ExtendedTorchModule
from ..functional import sparsity_error
from ._abstract_recurrent_cell import AbstractRecurrentCell
class HardSoftmaxNACLayer(ExtendedTorchModule):
"""Implements the NAC (Neural Accumulator)
Arguments:
in_features: number of ingoing features
out_features: number of outgoing features
"""
def __init__(self, in_features, out_features, **kwargs):
super().__init__('nac', **kwargs)
self.in_features = in_features
self.out_features = out_features
# Define the target weights. Also, put 0 last such that p1 = p2 = 0
# corresponds to p3 = 1 => w = 0.
self.register_buffer('target_weights', torch.tensor([1, -1, 0], dtype=torch.float32))
# Initialize a tensor that will hold the hard samples | self.register_buffer('sample', torch.LongTensor(out_features, in_features))
# We will use only two parameters per weight; this is to prevent the redundancy
# there would otherwise exist. This also makes it much more comparable with
# NAC.
self.W_hat = torch.nn.Parameter(torch.Tensor(out_features, in_features, 2))
self.register_buffer('W_hat_k', torch.Tensor(out_features, in_features, 1))
self.register_parameter('bias', None)
def reset_parameters(self):
# Use a gain of sqrt(0.5). Let's assume that softmax'(0) ~ 1, because this
# holds for sigmoid. Then (the coefficients enter squared):
# Var[W] = 1^2 * Var[S_1] + (-1)^2 * Var[S_2] + 0^2 * Var[S_3] = 2 / (fan[in] + fan[out])
# Var[W] = 2 * Var[S_i] = 2 / (fan[in] + fan[out])
# Var[S_i] = 1/2 * 2 / (fan[in] + fan[out])
# sqrt(Var[S_i]) = sqrt(1/2) * sqrt(2 / (fan[in] + fan[out]))
# This is not exactly true, because S_1, S_2, and S_3 are not entirely uncorrelated.
torch.nn.init.xavier_uniform_(self.W_hat, gain=math.sqrt(0.5))
torch.nn.init.constant_(self.W_hat_k, 0)
def forward(self, input, reuse=False):
# Concat trainable and non-trainable weights
W_hat_full = torch.cat((self.W_hat, self.W_hat_k), dim=-1) # size = [out, in, 3]
# Compute W_soft
pi = torch.nn.functional.softmax(W_hat_full, dim=-1)
W_soft = pi @ self.target_weights
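# W_soft is the expected weight under pi: p1*1 + p2*(-1) + p3*0.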
# Compute W_hard
if not reuse:
torch.multinomial(pi.view(-1, 3), 1, True, out=self.sample.view(-1))
W_hard = self.target_weights[self.sample]
# Use W_hard in the forward pass, but use W_soft for the gradients.
# This implementation trick comes from torch.nn.functional.gumble_softmax(hard=True)
W = W_hard - W_soft.detach() + W_soft
# Compute the linear multiplication as usual
self.writer.add_histogram('W', W)
self.writer.add_tensor('W', W)
self.writer.add_scalar('W/sparsity_error', sparsity_error(W), verbose_only=False)
return torch.nn.functional.linear(input, W, self.bias)
def extra_repr(self):
return 'in_features={}, out_features={}'.format(
self.in_features, self.out_features
)
class HardSoftmaxNACCell(AbstractRecurrentCell):
"""Implements the Gumbel NAC (Gumbel Neural Accumulator) as a recurrent cell
Arguments:
input_size: number of ingoing features
hidden_size: number of outgoing features
"""
def __init__(self, input_size, hidden_size, **kwargs):
super().__init__(HardSoftmaxNACLayer, input_size, hidden_size, **kwargs) | |
request.go | // Code generated by MockGen. DO NOT EDIT.
// Source: github.com/Nivl/go-rest-tools/router (interfaces: HTTPRequest)
// Package mockrouter is a generated GoMock package.
package mockrouter
import (
reflect "reflect"
go_logger "github.com/Nivl/go-logger"
go_reporter "github.com/Nivl/go-reporter"
router "github.com/Nivl/go-rest-tools/router"
auth "github.com/Nivl/go-rest-tools/security/auth"
gomock "github.com/golang/mock/gomock"
)
// MockHTTPRequest is a mock of HTTPRequest interface
type MockHTTPRequest struct {
ctrl *gomock.Controller
recorder *MockHTTPRequestMockRecorder
}
// MockHTTPRequestMockRecorder is the mock recorder for MockHTTPRequest
type MockHTTPRequestMockRecorder struct {
mock *MockHTTPRequest
}
// NewMockHTTPRequest creates a new mock instance
func | (ctrl *gomock.Controller) *MockHTTPRequest {
mock := &MockHTTPRequest{ctrl: ctrl}
mock.recorder = &MockHTTPRequestMockRecorder{mock}
return mock
}
// EXPECT returns an object that allows the caller to indicate expected use
func (m *MockHTTPRequest) EXPECT() *MockHTTPRequestMockRecorder {
return m.recorder
}
// ID mocks base method
func (m *MockHTTPRequest) ID() string {
ret := m.ctrl.Call(m, "ID")
ret0, _ := ret[0].(string)
return ret0
}
// ID indicates an expected call of ID
func (mr *MockHTTPRequestMockRecorder) ID() *gomock.Call {
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ID", reflect.TypeOf((*MockHTTPRequest)(nil).ID))
}
// Logger mocks base method
func (m *MockHTTPRequest) Logger() go_logger.Logger {
ret := m.ctrl.Call(m, "Logger")
ret0, _ := ret[0].(go_logger.Logger)
return ret0
}
// Logger indicates an expected call of Logger
func (mr *MockHTTPRequestMockRecorder) Logger() *gomock.Call {
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Logger", reflect.TypeOf((*MockHTTPRequest)(nil).Logger))
}
// Params mocks base method
func (m *MockHTTPRequest) Params() interface{} {
ret := m.ctrl.Call(m, "Params")
ret0, _ := ret[0].(interface{})
return ret0
}
// Params indicates an expected call of Params
func (mr *MockHTTPRequestMockRecorder) Params() *gomock.Call {
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Params", reflect.TypeOf((*MockHTTPRequest)(nil).Params))
}
// Reporter mocks base method
func (m *MockHTTPRequest) Reporter() go_reporter.Reporter {
ret := m.ctrl.Call(m, "Reporter")
ret0, _ := ret[0].(go_reporter.Reporter)
return ret0
}
// Reporter indicates an expected call of Reporter
func (mr *MockHTTPRequestMockRecorder) Reporter() *gomock.Call {
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Reporter", reflect.TypeOf((*MockHTTPRequest)(nil).Reporter))
}
// Response mocks base method
func (m *MockHTTPRequest) Response() router.HTTPResponse {
ret := m.ctrl.Call(m, "Response")
ret0, _ := ret[0].(router.HTTPResponse)
return ret0
}
// Response indicates an expected call of Response
func (mr *MockHTTPRequestMockRecorder) Response() *gomock.Call {
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Response", reflect.TypeOf((*MockHTTPRequest)(nil).Response))
}
// Session mocks base method
func (m *MockHTTPRequest) Session() *auth.Session {
ret := m.ctrl.Call(m, "Session")
ret0, _ := ret[0].(*auth.Session)
return ret0
}
// Session indicates an expected call of Session
func (mr *MockHTTPRequestMockRecorder) Session() *gomock.Call {
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Session", reflect.TypeOf((*MockHTTPRequest)(nil).Session))
}
// Signature mocks base method
func (m *MockHTTPRequest) Signature() string {
ret := m.ctrl.Call(m, "Signature")
ret0, _ := ret[0].(string)
return ret0
}
// Signature indicates an expected call of Signature
func (mr *MockHTTPRequestMockRecorder) Signature() *gomock.Call {
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Signature", reflect.TypeOf((*MockHTTPRequest)(nil).Signature))
}
// String mocks base method
func (m *MockHTTPRequest) String() string {
ret := m.ctrl.Call(m, "String")
ret0, _ := ret[0].(string)
return ret0
}
// String indicates an expected call of String
func (mr *MockHTTPRequestMockRecorder) String() *gomock.Call {
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "String", reflect.TypeOf((*MockHTTPRequest)(nil).String))
}
// User mocks base method
func (m *MockHTTPRequest) User() *auth.User {
ret := m.ctrl.Call(m, "User")
ret0, _ := ret[0].(*auth.User)
return ret0
}
// User indicates an expected call of User
func (mr *MockHTTPRequestMockRecorder) User() *gomock.Call {
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "User", reflect.TypeOf((*MockHTTPRequest)(nil).User))
}
| NewMockHTTPRequest |
bitesize.py | #!/usr/bin/python
#
# bitesize.py Block I/O size histogram.
# For Linux, uses BCC, eBPF. See .c file.
#
# USAGE: bitesize
#
# Ctrl-C will print the partially gathered histogram then exit.
#
# Copyright (c) 2016 Allan McAleavy
# Licensed under the Apache License, Version 2.0 (the "License")
#
# 05-Feb-2016 Allan McAleavy ran pep8 against file
from bcc import BPF
from time import sleep
bpf_text = """
#include <uapi/linux/ptrace.h>
#include <linux/blkdev.h>
struct proc_key_t {
char name[TASK_COMM_LEN];
u64 slot;
};
struct val_t {
char name[TASK_COMM_LEN];
};
BPF_HISTOGRAM(dist, struct proc_key_t);
BPF_HASH(commbyreq, struct request *, struct val_t);
int trace_pid_start(struct pt_regs *ctx, struct request *req)
{
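// remember the requesting process name, keyed by request pointer, for lookup at completion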
struct val_t val = {};
if (bpf_get_current_comm(&val.name, sizeof(val.name)) == 0) {
commbyreq.update(&req, &val);
}
return 0;
}
int do_count(struct pt_regs *ctx, struct request *req)
{
struct val_t *valp;
valp = commbyreq.lookup(&req);
if (valp == 0) {
return 0;
}
if (req->__data_len > 0) {
struct proc_key_t key = {.slot = bpf_log2l(req->__data_len / 1024)}; | dist.increment(key);
}
return 0;
}
"""
# load BPF program
b = BPF(text=bpf_text)
b.attach_kprobe(event="blk_account_io_start", fn_name="trace_pid_start")
b.attach_kprobe(event="blk_account_io_completion", fn_name="do_count")
print("Tracing... Hit Ctrl-C to end.")
# trace until Ctrl-C
dist = b.get_table("dist")
try:
sleep(99999999)
except KeyboardInterrupt:
dist.print_log2_hist("Kbytes", "Process Name",
section_print_fn=bytes.decode) | bpf_probe_read(&key.name, sizeof(key.name),valp->name); |
test_ext_indicator_cycles.py | from pandas.core.series import Series |
from unittest import TestCase
from pandas import DataFrame
class TestCylesExtension(TestCase):
@classmethod
def setUpClass(cls):
cls.data = sample_data
@classmethod
def tearDownClass(cls):
del cls.data
def setUp(self): pass
def tearDown(self): pass
def test_ebsw_ext(self):
self.data.ta.ebsw(append=True)
self.assertIsInstance(self.data, DataFrame)
self.assertEqual(self.data.columns[-1], "EBSW_40_10") | from .config import sample_data
from .context import pandas_ta |
test_analisis_capital.py | # -*- coding: utf-8 -*-
# Copyright (c) 2021, orlando and Contributors
# See license.txt
from __future__ import unicode_literals
# import frappe
import unittest
class Testanalisis_capital(unittest.TestCase):
| pass |
|
NodeReportType.go |
import (
"fmt"
)
type NodeReportType struct {
Upfr bool
}
func (n *NodeReportType) MarshalBinary() (data []byte, err error) {}
func (n *NodeReportType) UnmarshalBinary(data []byte) error {} | //go:binary-only-package
package pfcpType |
|
useDarkmodeSwitch.js | import { useEffect } from 'react'
import useStore from 'lib/store'
const useDarkmodeSwitch = () => {
const isDarkmode = useStore((state) => state.isDarkmode)
useEffect(() => { |
return null
}
export default useDarkmodeSwitch | document.documentElement.className = isDarkmode ? 'dark' : 'light'
}, [isDarkmode]) |
abvar.py | """
Base class for modular abelian varieties
AUTHORS:
- William Stein (2007-03)
TESTS::
sage: A = J0(33)
sage: D = A.decomposition(); D
[
Simple abelian subvariety 11a(1,33) of dimension 1 of J0(33),
Simple abelian subvariety 11a(3,33) of dimension 1 of J0(33),
Simple abelian subvariety 33a(1,33) of dimension 1 of J0(33)
]
sage: loads(dumps(D)) == D
True
sage: loads(dumps(A)) == A
True
"""
###########################################################################
# Copyright (C) 2007 William Stein <[email protected]> #
# Distributed under the terms of the GNU General Public License (GPL) #
# http://www.gnu.org/licenses/ #
###########################################################################
from sage.categories.all import ModularAbelianVarieties
from sage.structure.sequence import Sequence, Sequence_generic
from sage.structure.parent_base import ParentWithBase
from morphism import HeckeOperator, Morphism, DegeneracyMap
from torsion_subgroup import RationalTorsionSubgroup, QQbarTorsionSubgroup
from finite_subgroup import (FiniteSubgroup_lattice, FiniteSubgroup, TorsionPoint)
from cuspidal_subgroup import CuspidalSubgroup, RationalCuspidalSubgroup, RationalCuspSubgroup
from sage.rings.all import (ZZ, QQ, QQbar, LCM,
divisors, Integer, prime_range)
from sage.rings.ring import is_Ring
from sage.modules.free_module import is_FreeModule
from sage.modular.arithgroup.all import is_CongruenceSubgroup, is_Gamma0, is_Gamma1, is_GammaH
from sage.modular.modsym.all import ModularSymbols
from sage.modular.modsym.space import ModularSymbolsSpace
from sage.matrix.all import matrix, block_diagonal_matrix, identity_matrix
from sage.modules.all import vector
from sage.groups.all import AbelianGroup
from sage.databases.cremona import cremona_letter_code
from sage.misc.all import prod
from copy import copy
import homology
import homspace
import lseries
def is_ModularAbelianVariety(x):
"""
Return True if x is a modular abelian variety.
INPUT:
- ``x`` - object
EXAMPLES::
sage: from sage.modular.abvar.abvar import is_ModularAbelianVariety
sage: is_ModularAbelianVariety(5)
False
sage: is_ModularAbelianVariety(J0(37))
True
Returning True is a statement about the data type, not whether or
not some abelian variety is modular::
sage: is_ModularAbelianVariety(EllipticCurve('37a'))
False
"""
return isinstance(x, ModularAbelianVariety_abstract)
class ModularAbelianVariety_abstract(ParentWithBase):
def __init__(self, groups, base_field, is_simple=None, newform_level=None,
isogeny_number=None, number=None, check=True):
"""
Abstract base class for modular abelian varieties.
INPUT:
- ``groups`` - a tuple of congruence subgroups
- ``base_field`` - a field
- ``is_simple`` - bool; whether or not self is
simple
- ``newform_level`` - if self is isogenous to a
newform abelian variety, returns the level of that abelian variety
- ``isogeny_number`` - which isogeny class the
corresponding newform is in; this corresponds to the Cremona letter
code
- ``number`` - the t number of the degeneracy map that
this abelian variety is the image under
- ``check`` - whether to do some type checking on the
defining data
EXAMPLES: One should not create an instance of this class, but we
do so anyways here as an example::
sage: A = sage.modular.abvar.abvar.ModularAbelianVariety_abstract((Gamma0(37),), QQ)
sage: type(A)
<class 'sage.modular.abvar.abvar.ModularAbelianVariety_abstract_with_category'>
All hell breaks loose if you try to do anything with `A`::
sage: A
<repr(<sage.modular.abvar.abvar.ModularAbelianVariety_abstract_with_category at 0x...>) failed: NotImplementedError: BUG -- lattice method must be defined in derived class>
All instances of this class are in the category of modular
abelian varieties::
sage: A.category()
Category of modular abelian varieties over Rational Field
sage: J0(23).category()
Category of modular abelian varieties over Rational Field
"""
if check:
if not isinstance(groups, tuple):
raise TypeError("groups must be a tuple")
for G in groups:
if not is_CongruenceSubgroup(G):
raise TypeError("each element of groups must be a congruence subgroup")
self.__groups = groups
if is_simple is not None:
self.__is_simple = is_simple
if newform_level is not None:
self.__newform_level = newform_level
if number is not None:
self.__degen_t = number
if isogeny_number is not None:
self.__isogeny_number = isogeny_number
if check and not (is_Ring(base_field) and base_field.is_field()):
raise TypeError("base_field must be a field")
ParentWithBase.__init__(self, base_field, category = ModularAbelianVarieties(base_field))
def groups(self):
r"""
Return an ordered tuple of the congruence subgroups that the
ambient product Jacobian is attached to.
Every modular abelian variety is a finite quotient of an abelian
subvariety of a product of modular Jacobians `J_\Gamma`.
This function returns a tuple containing the groups
`\Gamma`.
EXAMPLES::
sage: A = (J0(37) * J1(13))[0]; A
Simple abelian subvariety 13aG1(1,13) of dimension 2 of J0(37) x J1(13)
sage: A.groups()
(Congruence Subgroup Gamma0(37), Congruence Subgroup Gamma1(13))
"""
return self.__groups
#############################################################################
# lattice() *must* be defined by every derived class!!!!
def lattice(self):
"""
Return lattice in ambient cuspidal modular symbols product that
defines this modular abelian variety.
This must be defined in each derived class.
OUTPUT: a free module over `\ZZ`
EXAMPLES::
sage: A = sage.modular.abvar.abvar.ModularAbelianVariety_abstract((Gamma0(37),), QQ)
sage: A
<repr(<sage.modular.abvar.abvar.ModularAbelianVariety_abstract_with_category at 0x...>) failed: NotImplementedError: BUG -- lattice method must be defined in derived class>
"""
raise NotImplementedError("BUG -- lattice method must be defined in derived class")
#############################################################################
def free_module(self):
r"""
Synonym for ``self.lattice()``.
OUTPUT: a free module over `\ZZ`
EXAMPLES::
sage: J0(37).free_module()
Ambient free module of rank 4 over the principal ideal domain Integer Ring
sage: J0(37)[0].free_module()
Free module of degree 4 and rank 2 over Integer Ring
Echelon basis matrix:
[ 1 -1 1 0]
[ 0 0 2 -1]
"""
return self.lattice()
def vector_space(self):
r"""
Return vector space corresponding to the modular abelian variety.
This is the lattice tensored with `\QQ`.
EXAMPLES::
sage: J0(37).vector_space()
Vector space of dimension 4 over Rational Field
sage: J0(37)[0].vector_space()
Vector space of degree 4 and dimension 2 over Rational Field
Basis matrix:
[ 1 -1 0 1/2]
[ 0 0 1 -1/2]
"""
try:
return self.__vector_space
except AttributeError:
self.__vector_space = self.lattice().change_ring(QQ)
return self.__vector_space
def base_field(self):
r"""
Synonym for ``self.base_ring()``.
EXAMPLES::
sage: J0(11).base_field()
Rational Field
"""
return self.base_ring()
def base_extend(self, K):
"""
EXAMPLES::
sage: A = J0(37); A
Abelian variety J0(37) of dimension 2
sage: A.base_extend(QQbar)
Abelian variety J0(37) over Algebraic Field of dimension 2
sage: A.base_extend(GF(7))
Abelian variety J0(37) over Finite Field of size 7 of dimension 2
"""
# (hasattr(self, '__newform_level') would never fire here due to name mangling)
try:
N = self.__newform_level
except AttributeError:
N = None
return ModularAbelianVariety(self.groups(), self.lattice(), K, newform_level=N)
def __contains__(self, x):
"""
Determine whether or not self contains x.
EXAMPLES::
sage: J = J0(67); G = (J[0] + J[1]).intersection(J[1] + J[2])
sage: G[0]
Finite subgroup with invariants [5, 10] over QQbar of Abelian subvariety of dimension 3 of J0(67)
sage: a = G[0].0; a
[(1/10, 1/10, 3/10, 1/2, 1, -2, -3, 33/10, 0, -1/2)]
sage: a in J[0]
False
sage: a in (J[0]+J[1])
True
sage: a in (J[1]+J[2])
True
sage: C = G[1] # abelian variety in kernel
sage: G[0].0
[(1/10, 1/10, 3/10, 1/2, 1, -2, -3, 33/10, 0, -1/2)]
sage: 5*G[0].0
[(1/2, 1/2, 3/2, 5/2, 5, -10, -15, 33/2, 0, -5/2)]
sage: 5*G[0].0 in C
True
"""
if not isinstance(x, TorsionPoint):
return False
if x.parent().abelian_variety().groups() != self.groups():
return False
v = x.element()
n = v.denominator()
nLambda = self.ambient_variety().lattice().scale(n)
return n*v in self.lattice() + nLambda
def __cmp__(self, other):
"""
Compare two modular abelian varieties.
If other is not a modular abelian variety, compares the types of
self and other. If other is a modular abelian variety, compares the
groups, then if those are the same, compares the newform level and
isogeny class number and degeneracy map numbers. If those are not
defined or matched up, compare the underlying lattices.
EXAMPLES::
sage: cmp(J0(37)[0], J0(37)[1])
-1
sage: cmp(J0(33)[0], J0(33)[1])
-1
sage: cmp(J0(37), 5) #random
1
"""
if not isinstance(other, ModularAbelianVariety_abstract):
return cmp(type(self), type(other))
if self is other:
return 0
c = cmp(self.groups(), other.groups())
if c: return c
try:
c = cmp(self.__newform_level, other.__newform_level)
if c: return c
except AttributeError:
pass
try:
c = cmp(self.__isogeny_number, other.__isogeny_number)
if c: return c
except AttributeError:
pass
try:
c = cmp(self.__degen_t, other.__degen_t)
if c: return c
except AttributeError:
pass
# NOTE!! having the same newform level, isogeny class number,
# and degen_t does not imply two abelian varieties are equal.
# See the docstring for self.label.
return cmp(self.lattice(), other.lattice())
def __radd__(self,other):
"""
Return other + self when other is 0. Otherwise raise a TypeError.
EXAMPLES::
sage: int(0) + J0(37)
Abelian variety J0(37) of dimension 2
"""
if other == 0:
return self
raise TypeError
def _repr_(self):
"""
Return string representation of this modular abelian variety.
This is just the generic base class, so it's unlikely to be called
in practice.
EXAMPLES::
sage: A = J0(23)
sage: import sage.modular.abvar.abvar as abvar
sage: abvar.ModularAbelianVariety_abstract._repr_(A)
'Abelian variety J0(23) of dimension 2'
::
sage: (J0(11) * J0(33))._repr_()
'Abelian variety J0(11) x J0(33) of dimension 4'
"""
field = '' if self.base_field() == QQ else ' over %s'%self.base_field()
#if self.newform_level(none_if_not_known=True) is None:
simple = self.is_simple(none_if_not_known=True)
if simple and self.dimension() > 0:
label = self.label() + ' '
else:
label = ''
simple = 'Simple a' if simple else 'A'
if self.is_ambient():
return '%sbelian variety %s%s of dimension %s'%(simple, self._ambient_repr(), field, self.dimension())
if self.is_subvariety_of_ambient_jacobian():
sub = 'subvariety'
else:
sub = 'variety factor'
return "%sbelian %s %sof dimension %s of %s%s"%(
simple, sub, label, self.dimension(), self._ambient_repr(), field)
def label(self):
r"""
Return the label associated to this modular abelian variety.
The format of the label is [level][isogeny class][group](t, ambient
level)
If this abelian variety `B` has the above label, this
implies only that `B` is isogenous to the newform abelian
variety `A_f` associated to the newform with label
[level][isogeny class][group]. The [group] is empty for
`\Gamma_0(N)`, is G1 for `\Gamma_1(N)` and is
GH[...] for `\Gamma_H(N)`.
.. warning::
The sum of `\delta_s(A_f)` for all `s\mid t`
contains `A`, but no sum for a proper divisor of
`t` contains `A`. It need *not* be the case
that `B` is equal to `\delta_t(A_f)`!!!
OUTPUT: string
EXAMPLES::
sage: J0(11).label()
'11a(1,11)'
sage: J0(11)[0].label()
'11a(1,11)'
sage: J0(33)[2].label()
'33a(1,33)'
sage: J0(22).label()
Traceback (most recent call last):
...
ValueError: self must be simple
We illustrate that self need not equal `\delta_t(A_f)`::
sage: J = J0(11); phi = J.degeneracy_map(33, 1) + J.degeneracy_map(33,3)
sage: B = phi.image(); B
Abelian subvariety of dimension 1 of J0(33)
sage: B.decomposition()
[
Simple abelian subvariety 11a(3,33) of dimension 1 of J0(33)
]
sage: C = J.degeneracy_map(33,3).image(); C
Abelian subvariety of dimension 1 of J0(33)
sage: C == B
False
"""
degen = str(self.degen_t()).replace(' ','')
return '%s%s'%(self.newform_label(), degen)
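# Anatomy of a label, grounded in the examples above: in '11a(1,11)' the
# newform part '11a' records level 11, isogeny class 'a' and the trivial
# group (Gamma0), while the suffix '(1,11)' records the degeneracy
# parameter t = 1 inside the ambient level 11.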
def newform_label(self):
"""
Return the label [level][isogeny class][group] of the newform
`f` such that this abelian variety is isogenous to the
newform abelian variety `A_f`. If this abelian variety is
not simple, raise a ValueError.
OUTPUT: string
EXAMPLES::
sage: J0(11).newform_label()
'11a'
sage: J0(33)[2].newform_label()
'33a'
The following fails since `J_0(33)` is not simple::
sage: J0(33).newform_label()
Traceback (most recent call last):
...
ValueError: self must be simple
"""
N, G = self.newform_level()
if is_Gamma0(G):
group = ''
elif is_Gamma1(G):
group = 'G1'
elif is_GammaH(G):
group = 'GH%s'%(str(G._generators_for_H()).replace(' ',''))
else:
# Defensive: without this branch, an unexpected group would leave
# ``group`` unbound and raise a confusing NameError below.
raise NotImplementedError("newform labels only implemented for Gamma0, Gamma1 and GammaH groups")
return '%s%s%s'%(N, cremona_letter_code(self.isogeny_number()), group)
def _isogeny_to_newform_abelian_variety(self):
r"""
Return an isogeny from self to an abelian variety `A_f`
attached to a newform. If self is not simple (so that no such
isogeny exists), raise a ValueError.
EXAMPLES::
sage: J0(22)[0]._isogeny_to_newform_abelian_variety()
Abelian variety morphism:
From: Simple abelian subvariety 11a(1,22) of dimension 1 of J0(22)
To: Newform abelian subvariety 11a of dimension 1 of J0(11)
sage: J = J0(11); phi = J.degeneracy_map(33, 1) + J.degeneracy_map(33,3)
sage: A = phi.image()
sage: A._isogeny_to_newform_abelian_variety().matrix()
[-3 3]
[ 0 -3]
"""
try:
return self._newform_isogeny
except AttributeError:
pass
if not self.is_simple():
raise ValueError("self is not simple")
ls = []
t, N = self.decomposition()[0].degen_t()
A = self.ambient_variety()
for i in range(len(self.groups())):
g = self.groups()[i]
if N == g.level():
J = g.modular_abelian_variety()
d = J.degeneracy_map(self.newform_level()[0], t)
p = A.project_to_factor(i)
mat = p.matrix() * d.matrix()
if not (self.lattice().matrix() * mat).is_zero():
break
from constructor import AbelianVariety
Af = AbelianVariety(self.newform_label())
H = A.Hom(Af.ambient_variety())
m = H(Morphism(H, mat))
self._newform_isogeny = m.restrict_domain(self).restrict_codomain(Af)
return self._newform_isogeny
def _simple_isogeny(self, other):
"""
Given self and other, if both are simple, and correspond to the
same newform with the same congruence subgroup, return an isogeny.
Otherwise, raise a ValueError.
INPUT:
- ``self, other`` - modular abelian varieties
OUTPUT: an isogeny
EXAMPLES::
sage: J = J0(33); J
Abelian variety J0(33) of dimension 3
sage: J[0]._simple_isogeny(J[1])
Abelian variety morphism:
From: Simple abelian subvariety 11a(1,33) of dimension 1 of J0(33)
To: Simple abelian subvariety 11a(3,33) of dimension 1 of J0(33)
The following illustrates how simple isogeny is only implemented
when the ambients are the same::
sage: J[0]._simple_isogeny(J1(11))
Traceback (most recent call last):
...
NotImplementedError: _simple_isogeny only implemented when both abelian varieties have the same ambient product Jacobian
"""
if not is_ModularAbelianVariety(other):
raise TypeError("other must be a modular abelian variety")
if not self.is_simple():
raise ValueError("self is not simple")
if not other.is_simple():
raise ValueError("other is not simple")
if self.groups() != other.groups():
# The issue here is that the stuff below probably won't make any sense at all if we don't know
# that the two newform abelian varieties $A_f$ are identical.
raise NotImplementedError("_simple_isogeny only implemented when both abelian varieties have the same ambient product Jacobian")
if (self.newform_level() != other.newform_level()) or \
(self.isogeny_number() != other.isogeny_number()):
raise ValueError("self and other do not correspond to the same newform")
return other._isogeny_to_newform_abelian_variety().complementary_isogeny() * \
self._isogeny_to_newform_abelian_variety()
def _Hom_(self, B, cat=None):
"""
INPUT:
- ``B`` - modular abelian varieties
- ``cat`` - category
EXAMPLES::
sage: J0(37)._Hom_(J1(37))
Space of homomorphisms from Abelian variety J0(37) of dimension 2 to Abelian variety J1(37) of dimension 40
sage: J0(37)._Hom_(J1(37)).homset_category()
Category of modular abelian varieties over Rational Field
"""
if cat is None:
K = self.base_field(); L = B.base_field()
if K == L:
F = K
elif K == QQbar or L == QQbar:
F = QQbar
else:
# TODO -- improve this
raise ValueError("please specify a category")
cat = ModularAbelianVarieties(F)
if self is B:
return self.endomorphism_ring(cat)
else:
return homspace.Homspace(self, B, cat)
def in_same_ambient_variety(self, other):
"""
Return True if self and other are abelian subvarieties of the same
ambient product Jacobian.
EXAMPLES::
sage: A,B,C = J0(33)
sage: A.in_same_ambient_variety(B)
True
sage: A.in_same_ambient_variety(J0(11))
False
"""
if not is_ModularAbelianVariety(other):
return False
if self.groups() != other.groups():
return False
if not self.is_subvariety_of_ambient_jacobian() or not other.is_subvariety_of_ambient_jacobian():
return False
return True
def modular_kernel(self):
"""
Return the modular kernel of this abelian variety, which is the
kernel of the canonical polarization of self.
EXAMPLES::
sage: A = AbelianVariety('33a'); A
Newform abelian subvariety 33a of dimension 1 of J0(33)
sage: A.modular_kernel()
Finite subgroup with invariants [3, 3] over QQ of Newform abelian subvariety 33a of dimension 1 of J0(33)
"""
try:
return self.__modular_kernel
except AttributeError:
_, f, _ = self.dual()
G = f.kernel()[0]
self.__modular_kernel = G
return G
def modular_degree(self):
"""
Return the modular degree of this abelian variety, which is the
square root of the degree of the modular kernel.
EXAMPLES::
sage: A = AbelianVariety('37a')
sage: A.modular_degree()
2
"""
n = self.modular_kernel().order()
return ZZ(n.sqrt())
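# Consistency check with the docstrings above: '33a' has modular kernel
# with invariants [3, 3], hence order 9 and modular degree sqrt(9) = 3,
# while '37a' has modular degree 2, so its modular kernel has order 4.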
def intersection(self, other):
"""
Returns the intersection of self and other inside a common ambient
Jacobian product.
INPUT:
- ``other`` - a modular abelian variety or a finite
group
OUTPUT: If other is a modular abelian variety:
- ``G`` - finite subgroup of self
- ``A`` - abelian variety (identity component of the intersection)
If other is a finite group:
- ``G`` - a finite group
EXAMPLES: We intersect some abelian varieties with finite
intersection.
::
sage: J = J0(37)
sage: J[0].intersection(J[1])
(Finite subgroup with invariants [2, 2] over QQ of Simple abelian subvariety 37a(1,37) of dimension 1 of J0(37), Simple abelian subvariety of dimension 0 of J0(37))
::
sage: D = list(J0(65)); D
[Simple abelian subvariety 65a(1,65) of dimension 1 of J0(65), Simple abelian subvariety 65b(1,65) of dimension 2 of J0(65), Simple abelian subvariety 65c(1,65) of dimension 2 of J0(65)]
sage: D[0].intersection(D[1])
(Finite subgroup with invariants [2] over QQ of Simple abelian subvariety 65a(1,65) of dimension 1 of J0(65), Simple abelian subvariety of dimension 0 of J0(65))
sage: (D[0]+D[1]).intersection(D[1]+D[2])
(Finite subgroup with invariants [2] over QQbar of Abelian subvariety of dimension 3 of J0(65), Abelian subvariety of dimension 2 of J0(65))
::
sage: J = J0(33)
sage: J[0].intersection(J[1])
(Finite subgroup with invariants [5] over QQ of Simple abelian subvariety 11a(1,33) of dimension 1 of J0(33), Simple abelian subvariety of dimension 0 of J0(33))
Next we intersect two abelian varieties with non-finite
intersection::
sage: J = J0(67); D = J.decomposition(); D
[
Simple abelian subvariety 67a(1,67) of dimension 1 of J0(67),
Simple abelian subvariety 67b(1,67) of dimension 2 of J0(67),
Simple abelian subvariety 67c(1,67) of dimension 2 of J0(67)
]
sage: (D[0] + D[1]).intersection(D[1] + D[2])
(Finite subgroup with invariants [5, 10] over QQbar of Abelian subvariety of dimension 3 of J0(67), Abelian subvariety of dimension 2 of J0(67))
"""
# First check whether we are intersecting an abelian variety
# with a finite subgroup. If so, call the intersection method
# for the finite group, which does know how to intersect with
# an abelian variety.
if isinstance(other, FiniteSubgroup):
return other.intersection(self)
# Now both self and other are abelian varieties. We require
# at least that the ambient Jacobian product is the same for
# them.
if not self.in_same_ambient_variety(other):
raise TypeError("other must be an abelian variety in the same ambient space")
# 1. Compute the abelian variety (connected) part of the intersection
V = self.vector_space().intersection(other.vector_space())
if V.dimension() > 0:
# If there is a nonzero abelian variety, get the actual
# lattice that defines it. We intersect (=saturate) in
# the sum of the lattices, to ensure that the intersection
# is an abelian subvariety of both self and other (even if
# they aren't subvarieties of the ambient Jacobian).
lattice = V.intersection(self.lattice() + other.lattice())
A = ModularAbelianVariety(self.groups(), lattice, self.base_field(), check=False)
else:
A = self.zero_subvariety()
# 2. Compute the finite intersection group when the
# intersection is finite, or a group that maps surjectively
# onto the component group in general.
# First we get basis matrices for the lattices that define
# both abelian varieties.
L = self.lattice().basis_matrix()
M = other.lattice().basis_matrix()
# Then we stack matrices and find a subset that forms a
# basis.
LM = L.stack(M)
P = LM.pivot_rows()
V = (ZZ**L.ncols()).span_of_basis([LM.row(p) for p in P])
S = (self.lattice() + other.lattice()).saturation()
n = self.lattice().rank()
# Finally we project onto the L factor.
gens = [L.linear_combination_of_rows(v.list()[:n])
for v in V.coordinate_module(S).basis()]
if A.dimension() > 0:
finitegroup_base_field = QQbar
else:
finitegroup_base_field = self.base_field()
G = self.finite_subgroup(gens, field_of_definition=finitegroup_base_field)
return G, A
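# A toy illustration (hypothetical session) of the saturation used in
# step 2 above: the sum of L = ZZ*(2,0) and M = ZZ*(0,2) has index 4 in
# its saturation ZZ^2, and finite quotients of exactly this kind are the
# component groups this method surfaces.
#
#   sage: L = (ZZ^2).span([[2, 0]]); M = (ZZ^2).span([[0, 2]])
#   sage: (L + M).index_in((L + M).saturation())
#   4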
def __add__(self, other):
r"""
Returns the sum of the *images* of self and other inside the
ambient Jacobian product. self and other must be abelian
subvarieties of the ambient Jacobian product.
.. warning::
The sum of course only makes sense in some ambient variety,
and by definition this function takes the sum of the images
of both self and other in the ambient product Jacobian.
EXAMPLES: We compute the sum of two abelian varieties of
`J_0(33)`::
sage: J = J0(33)
sage: J[0] + J[1]
Abelian subvariety of dimension 2 of J0(33)
We sum all three and get the full `J_0(33)`::
sage: (J[0] + J[1]) + (J[1] + J[2])
Abelian variety J0(33) of dimension 3
Adding to zero works::
sage: J[0] + 0
Simple abelian subvariety 11a(1,33) of dimension 1 of J0(33)
Hence the sum command works::
sage: sum([J[0], J[2]])
Abelian subvariety of dimension 2 of J0(33)
We try to add something in `J_0(33)` to something in
`J_0(11)`; this shouldn't and doesn't work.
::
sage: J[0] + J0(11)
Traceback (most recent call last):
...
TypeError: sum not defined since ambient spaces different
We compute the diagonal image of `J_0(11)` in
`J_0(33)`, then add the result to the new elliptic curve
of level `33`.
::
sage: A = J0(11)
sage: B = (A.degeneracy_map(33,1) + A.degeneracy_map(33,3)).image()
sage: B + J0(33)[2]
Abelian subvariety of dimension 2 of J0(33)
TESTS: This exposed a bug in HNF (see trac #4527)::
sage: A = J0(206).new_subvariety().decomposition()[3] ; A # long time
Simple abelian subvariety 206d(1,206) of dimension 4 of J0(206)
sage: B = J0(206).old_subvariety(2) ; B # long time
Abelian subvariety of dimension 16 of J0(206)
sage: A+B # long time
Abelian subvariety of dimension 20 of J0(206)
"""
if not is_ModularAbelianVariety(other):
if other == 0:
return self
raise TypeError("other must be a modular abelian variety")
if self.groups() != other.groups():
raise ValueError("incompatible ambient Jacobians")
L = self.vector_space() + other.vector_space()
M = L.intersection(self._ambient_lattice())
return ModularAbelianVariety(self.groups(), M, self.base_field(), check=False)
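# Note on the construction above: summing the rational vector spaces and
# then intersecting with the ambient integral lattice takes the *images*
# of self and other in the ambient Jacobian, and the resulting lattice is
# automatically saturated, hence defines an honest abelian subvariety.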
def direct_product(self, other):
"""
Compute the direct product of self and other.
INPUT:
- ``self, other`` - modular abelian varieties
OUTPUT: abelian variety
EXAMPLES::
sage: J0(11).direct_product(J1(13))
Abelian variety J0(11) x J1(13) of dimension 3
sage: A = J0(33)[0].direct_product(J0(33)[1]); A
Abelian subvariety of dimension 2 of J0(33) x J0(33)
sage: A.lattice()
Free module of degree 12 and rank 4 over Integer Ring
Echelon basis matrix:
[ 1 1 -2 0 2 -1 0 0 0 0 0 0]
[ 0 3 -2 -1 2 0 0 0 0 0 0 0]
[ 0 0 0 0 0 0 1 0 0 0 -1 2]
[ 0 0 0 0 0 0 0 1 -1 1 0 -2]
"""
return self * other
def __pow__(self, n):
"""
Return `n^{th}` power of self.
INPUT:
- ``n`` - a nonnegative integer
OUTPUT: an abelian variety
EXAMPLES::
sage: J = J0(37)
sage: J^0
Simple abelian subvariety of dimension 0 of J0(37)
sage: J^1
Abelian variety J0(37) of dimension 2
sage: J^1 is J
True
"""
n = ZZ(n)
if n < 0:
raise ValueError("n must be nonnegative")
if n == 0:
return self.zero_subvariety()
if n == 1:
return self
groups = self.groups() * n
L = self.lattice().basis_matrix()
lattice = block_diagonal_matrix([L]*n).row_module(ZZ)
return ModularAbelianVariety(groups, lattice, self.base_field(), check=False)
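# A minimal sketch (hypothetical session) of the lattice built above: the
# n-th power of A is cut out by n diagonal copies of A's basis matrix.
#
#   sage: B = matrix(ZZ, [[1, 0]])
#   sage: block_diagonal_matrix([B]*2).row_module(ZZ)
#   Free module of degree 4 and rank 2 over Integer Ring
#   Echelon basis matrix:
#   [1 0 0 0]
#   [0 0 1 0]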
def __mul__(self, other):
"""
Compute the direct product of self and other.
EXAMPLES: Some modular Jacobians::
sage: J0(11) * J0(33)
Abelian variety J0(11) x J0(33) of dimension 4
sage: J0(11) * J0(33) * J0(11)
Abelian variety J0(11) x J0(33) x J0(11) of dimension 5
We multiply some factors of `J_0(65)`::
sage: d = J0(65).decomposition()
sage: d[0] * d[1] * J0(11)
Abelian subvariety of dimension 4 of J0(65) x J0(65) x J0(11)
"""
if not is_ModularAbelianVariety(other):
raise TypeError("other must be a modular abelian variety")
if other.base_ring() != self.base_ring():
raise TypeError("self and other must have the same base ring")
groups = tuple(list(self.groups()) + list(other.groups()))
lattice = self.lattice().direct_sum(other.lattice())
base_field = self.base_ring()
return ModularAbelianVariety(groups, lattice, base_field, check=False)
def quotient(self, other):
"""
Compute the quotient of self and other, where other is either an
abelian subvariety of self or a finite subgroup of self.
INPUT:
- ``other`` - a finite subgroup or subvariety
OUTPUT: a pair (A, phi) with phi the quotient map from self to A
EXAMPLES: We quotient `J_0(33)` out by an abelian
subvariety::
sage: Q, f = J0(33).quotient(J0(33)[0])
sage: Q
Abelian variety factor of dimension 2 of J0(33)
sage: f
Abelian variety morphism:
From: Abelian variety J0(33) of dimension 3
To: Abelian variety factor of dimension 2 of J0(33)
We quotient `J_0(33)` by the cuspidal subgroup::
sage: C = J0(33).cuspidal_subgroup()
sage: Q, f = J0(33).quotient(C)
sage: Q
Abelian variety factor of dimension 3 of J0(33)
sage: f.kernel()[0]
Finite subgroup with invariants [10, 10] over QQ of Abelian variety J0(33) of dimension 3
sage: C
Finite subgroup with invariants [10, 10] over QQ of Abelian variety J0(33) of dimension 3
"""
return self.__div__(other)
def __div__(self, other):
"""
Compute the quotient of self and other, where other is either an
abelian subvariety of self or a finite subgroup of self.
INPUT:
- ``other`` - a finite subgroup or subvariety
EXAMPLES: Quotient out by a finite group::
sage: J = J0(67); G = (J[0] + J[1]).intersection(J[1] + J[2])
sage: Q, _ = J/G[0]; Q
Abelian variety factor of dimension 5 of J0(67) over Algebraic Field
sage: Q.base_field()
Algebraic Field
sage: Q.lattice()
Free module of degree 10 and rank 10 over Integer Ring
Echelon basis matrix:
[1/10 1/10 3/10 1/2 0 0 0 3/10 0 1/2]
[ 0 1/5 4/5 4/5 0 0 0 0 0 3/5]
...
Quotient out by an abelian subvariety::
sage: A, B, C = J0(33)
sage: Q, phi = J0(33)/A
sage: Q
Abelian variety factor of dimension 2 of J0(33)
sage: phi.domain()
Abelian variety J0(33) of dimension 3
sage: phi.codomain()
Abelian variety factor of dimension 2 of J0(33)
sage: phi.kernel()
(Finite subgroup with invariants [2] over QQbar of Abelian variety J0(33) of dimension 3,
Abelian subvariety of dimension 1 of J0(33))
sage: phi.kernel()[1] == A
True
The abelian variety we quotient out by must be an abelian
subvariety.
::
sage: Q = (A + B)/C; Q
Traceback (most recent call last):
...
TypeError: other must be a subgroup or abelian subvariety
"""
if isinstance(other, FiniteSubgroup):
if other.abelian_variety() != self:
other = self.finite_subgroup(other)
return self._quotient_by_finite_subgroup(other)
elif isinstance(other, ModularAbelianVariety_abstract) and other.is_subvariety(self):
return self._quotient_by_abelian_subvariety(other)
else:
raise TypeError("other must be a subgroup or abelian subvariety")
def degeneracy_map(self, M_ls, t_ls):
"""
Return the degeneracy map with domain self and given
level/parameter. If self.ambient_variety() is a product of
Jacobians (as opposed to a single Jacobian), then one can provide a
list of new levels and parameters, corresponding to the ambient
Jacobians in order. (See the examples below.)
INPUT:
- ``M, t`` - integers level and `t`, or
- ``Mlist, tlist`` - if self is in a nontrivial
product ambient Jacobian, input consists of a list of levels and
corresponding list of `t`'s.
OUTPUT: a degeneracy map
EXAMPLES: We make several degeneracy maps related to
`J_0(11)` and `J_0(33)` and compute their
matrices.
::
sage: d1 = J0(11).degeneracy_map(33, 1); d1
Degeneracy map from Abelian variety J0(11) of dimension 1 to Abelian variety J0(33) of dimension 3 defined by [1]
sage: d1.matrix()
[ 0 -3 2 1 -2 0]
[ 1 -2 0 1 0 -1]
sage: d2 = J0(11).degeneracy_map(33, 3); d2
Degeneracy map from Abelian variety J0(11) of dimension 1 to Abelian variety J0(33) of dimension 3 defined by [3]
sage: d2.matrix()
[-1 0 0 0 1 -2]
[-1 -1 1 -1 1 0]
sage: d3 = J0(33).degeneracy_map(11, 1); d3
Degeneracy map from Abelian variety J0(33) of dimension 3 to Abelian variety J0(11) of dimension 1 defined by [1]
Here we verify that first mapping from level `11` to level
`33`, then back is multiplication by `4`::
sage: d1.matrix() * d3.matrix()
[4 0]
[0 4]
We compute a more complicated degeneracy map involving nontrivial
product ambient Jacobians; note that this is just the block direct
sum of the two matrices at the beginning of this example::
sage: d = (J0(11)*J0(11)).degeneracy_map([33,33], [1,3]); d
Degeneracy map from Abelian variety J0(11) x J0(11) of dimension 2 to Abelian variety J0(33) x J0(33) of dimension 6 defined by [1, 3]
sage: d.matrix()
[ 0 -3 2 1 -2 0 0 0 0 0 0 0]
[ 1 -2 0 1 0 -1 0 0 0 0 0 0]
[ 0 0 0 0 0 0 -1 0 0 0 1 -2]
[ 0 0 0 0 0 0 -1 -1 1 -1 1 0]
"""
if not isinstance(M_ls, list):
M_ls = [M_ls]
if not isinstance(t_ls, list):
t_ls = [t_ls]
groups = self.groups()
length = len(M_ls)
if length != len(t_ls):
raise ValueError("must have same number of Ms and ts")
if length != len(groups):
raise ValueError("must have same number of Ms and groups in ambient variety")
for i in range(length):
N = groups[i].level()
if (M_ls[i]%N) and (N%M_ls[i]):
raise ValueError("one level must divide the other in %s-th component"%i)
if (( max(M_ls[i],N) // min(M_ls[i],N) ) % t_ls[i]):
raise ValueError("each t must divide the quotient of the levels")
ls = [ self.groups()[i].modular_abelian_variety().degeneracy_map(M_ls[i], t_ls[i]).matrix() for i in range(length) ]
new_codomain = prod([ self.groups()[i]._new_group_from_level(M_ls[i]).modular_abelian_variety()
for i in range(length) ])
M = block_diagonal_matrix(ls, subdivide=False)
H = self.Hom(new_codomain)
return H(DegeneracyMap(H, M.restrict_domain(self.lattice()), t_ls))
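# Example of the divisibility constraints checked above, grounded in the
# docstring: for levels 11 -> 33 the quotient of the levels is 3, so the
# only valid degeneracy parameters are t = 1 and t = 3.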
def _quotient_by_finite_subgroup(self, G):
"""
Return the quotient of self by the finite subgroup `G`.
This is used internally by the quotient and __div__ commands.
INPUT:
- ``G`` - a finite subgroup of self
OUTPUT: abelian variety - the quotient `Q` of self by
`G`
- ``morphism`` - from self to the quotient
`Q`
EXAMPLES: We quotient the elliptic curve `J_0(11)` out by
its cuspidal subgroup.
::
sage: A = J0(11)
sage: G = A.cuspidal_subgroup(); G
Finite subgroup with invariants [5] over QQ of Abelian variety J0(11) of dimension 1
sage: Q, f = A._quotient_by_finite_subgroup(G)
sage: Q
Abelian variety factor of dimension 1 of J0(11)
sage: f
Abelian variety morphism:
From: Abelian variety J0(11) of dimension 1
To: Abelian variety factor of dimension 1 of J0(11)
We compute the finite kernel of `f` (hence the [0]) and
note that it equals the subgroup `G` that we quotiented out
by::
sage: f.kernel()[0] == G
True
"""
if G.order() == 1:
return self
L = self.lattice() + G.lattice()
A = ModularAbelianVariety(self.groups(), L, G.field_of_definition())
M = L.coordinate_module(self.lattice()).basis_matrix()
phi = self.Hom(A)(M)
return A, phi
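# A one-dimensional sketch (hypothetical session) of the construction
# above: enlarging the lattice ZZ by a point of order 5 realizes the
# quotient torus, and coordinate_module() rewrites the old lattice in the
# new coordinates, here giving multiplication by 5 as the quotient map.
#
#   sage: L = (ZZ^1).span([[1]])
#   sage: Lbig = L + (ZZ^1).span([[1/5]])
#   sage: Lbig.coordinate_module(L).basis_matrix()
#   [5]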
def _quotient_by_abelian_subvariety(self, B):
"""
Return the quotient of self by the abelian variety `B`.
This is used internally by the quotient and __div__ commands.
INPUT:
- ``B`` - an abelian subvariety of self
OUTPUT:
- ``abelian variety`` - quotient `Q` of self
by B
- ``morphism`` - from self to the quotient
`Q`
EXAMPLES: We compute the new quotient of `J_0(33)`.
::
sage: A = J0(33); B = A.old_subvariety()
sage: Q, f = A._quotient_by_abelian_subvariety(B)
Note that the quotient happens to also be an abelian subvariety::
sage: Q
Abelian subvariety of dimension 1 of J0(33)
sage: Q.lattice()
Free module of degree 6 and rank 2 over Integer Ring
Echelon basis matrix:
[ 1 0 0 -1 0 0]
[ 0 0 1 0 1 -1]
sage: f
Abelian variety morphism:
From: Abelian variety J0(33) of dimension 3
To: Abelian subvariety of dimension 1 of J0(33)
We verify that `B` is equal to the kernel of the quotient
map.
::
sage: f.kernel()[1] == B
True
Next we quotient `J_0(33)` out by `Q` itself::
sage: C, g = A._quotient_by_abelian_subvariety(Q)
The result is not a subvariety::
sage: C
Abelian variety factor of dimension 2 of J0(33)
sage: C.lattice()
Free module of degree 6 and rank 4 over Integer Ring
Echelon basis matrix:
[ 1/3 0 0 2/3 -1 0]
[ 0 1 0 0 -1 1]
[ 0 0 1/3 0 -2/3 2/3]
[ 0 0 0 1 -1 -1]
"""
# We first compute the complement of B in self to get
# an abelian variety C also in self such that self/B
# is isogenous to C. This is the case because the
# projection map pi:self --> C is surjective and has
# kernel a finite extension of the abelian variety B.
C = B.complement(self)
# Now that we have C we need to find some abelian variety Q
# isogenous to C and a map self --> Q whose kernel is exactly
# B. We do this by computing the kernel of the map pi below,
# which is an extension of the abelian variety B by a finite
# group Phi of complements. Our strategy is to enlarge the
# lattice that defines C so that the map pi below suddenly
# has connected kernel.
pi = self.projection(C)
psi = pi.factor_out_component_group()
Q = psi.codomain()
return Q, psi
def projection(self, A, check=True):
"""
Given an abelian subvariety A of self, return a projection morphism
from self to A. Note that this morphism need not be unique.
INPUT:
- ``A`` - an abelian variety
OUTPUT: a morphism
EXAMPLES::
sage: a,b,c = J0(33)
sage: pi = J0(33).projection(a); pi.matrix()
[ 3 -2]
[-5 5]
[-4 1]
[ 3 -2]
[ 5 0]
[ 1 1]
sage: pi = (a+b).projection(a); pi.matrix()
[ 0 0]
[-3 2]
[-4 1]
[-1 -1]
sage: pi = a.projection(a); pi.matrix()
[1 0]
[0 1]
We project onto a factor in a product of two Jacobians::
sage: A = J0(11)*J0(11); A
Abelian variety J0(11) x J0(11) of dimension 2
sage: A[0]
Simple abelian subvariety 11a(1,11) of dimension 1 of J0(11) x J0(11)
sage: A.projection(A[0])
Abelian variety morphism:
From: Abelian variety J0(11) x J0(11) of dimension 2
To: Simple abelian subvariety 11a(1,11) of dimension 1 of J0(11) x J0(11)
sage: A.projection(A[0]).matrix()
[0 0]
[0 0]
[1 0]
[0 1]
sage: A.projection(A[1]).matrix()
[1 0]
[0 1]
[0 0]
[0 0]
"""
if check and not A.is_subvariety(self):
raise ValueError("A must be an abelian subvariety of self")
W = A.complement(self)
mat = A.lattice().basis_matrix().stack(W.lattice().basis_matrix())
# Solve X * mat = B, where B is the basis matrix of self's lattice,
# i.e., write each basis row of self in terms of the rows of mat.
X = mat.solve_left(self.lattice().basis_matrix())
# The projection map is obtained from the first 2*dim(A) columns of X.
X = X.matrix_from_columns(range(2*A.dimension()))
X, _ = X._clear_denom()
return Morphism(self.Hom(A), X)
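# A minimal sketch (hypothetical session) of the linear algebra above:
# stacking bases of A and its complement W gives a basis of the ambient
# rational homology, solve_left writes self's basis in it, and the first
# block of coordinates is the projection onto A.
#
#   sage: mat = matrix(QQ, [[1, 1], [0, 1]])   # rows: basis of A, then W
#   sage: mat.solve_left(matrix(QQ, [[2, 3]]))
#   [2 1]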
def project_to_factor(self, n):
"""
If self is an ambient product of Jacobians, return a projection
from self to the nth such Jacobian.
EXAMPLES::
sage: J = J0(33)
sage: J.project_to_factor(0)
Abelian variety endomorphism of Abelian variety J0(33) of dimension 3
::
sage: J = J0(33) * J0(37) * J0(11)
sage: J.project_to_factor(2)
Abelian variety morphism:
From: Abelian variety J0(33) x J0(37) x J0(11) of dimension 6
To: Abelian variety J0(11) of dimension 1
sage: J.project_to_factor(2).matrix()
[0 0]
[0 0]
[0 0]
[0 0]
[0 0]
[0 0]
[0 0]
[0 0]
[0 0]
[0 0]
[1 0]
[0 1]
"""
if not self.is_ambient():
raise ValueError("self is not ambient")
if n >= len(self.groups()):
raise IndexError("index (=%s) too large (max = %s)"%(n, len(self.groups())))
G = self.groups()[n]
A = G.modular_abelian_variety()
index = sum([ gp.modular_symbols().cuspidal_subspace().dimension()
for gp in self.groups()[0:n] ])
H = self.Hom(A)
mat = H.matrix_space()(0)
mat.set_block(index, 0, identity_matrix(2*A.dimension()))
return H(Morphism(H, mat))
def is_subvariety_of_ambient_jacobian(self):
"""
Return True if self is (presented as) a subvariety of the ambient
product Jacobian.
Every abelian variety in Sage is a quotient of a subvariety of an
ambient Jacobian product by a finite subgroup.
EXAMPLES::
sage: J0(33).is_subvariety_of_ambient_jacobian()
True
sage: A = J0(33)[0]; A
Simple abelian subvariety 11a(1,33) of dimension 1 of J0(33)
sage: A.is_subvariety_of_ambient_jacobian()
True
sage: B, phi = A / A.torsion_subgroup(2)
sage: B
Abelian variety factor of dimension 1 of J0(33)
sage: phi.matrix()
[2 0]
[0 2]
sage: B.is_subvariety_of_ambient_jacobian()
False
"""
try:
return self.__is_sub_ambient
except AttributeError:
self.__is_sub_ambient = (self.lattice().denominator() == 1)
return self.__is_sub_ambient
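# The denominator test above works because self is presented as a
# subvariety of the ambient Jacobian exactly when its defining lattice is
# integral; a fractional entry, as in (ZZ^2).span([[1/2, 0]]), records a
# quotient by a finite subgroup and makes the test fail.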
def ambient_variety(self):
"""
Return the ambient modular abelian variety that contains this
abelian variety. The ambient variety is always a product of
Jacobians of modular curves.
OUTPUT: abelian variety
EXAMPLES::
sage: A = J0(33)[0]; A
Simple abelian subvariety 11a(1,33) of dimension 1 of J0(33)
sage: A.ambient_variety()
Abelian variety J0(33) of dimension 3
"""
try:
return self.__ambient_variety
except AttributeError:
A = ModularAbelianVariety(self.groups(), ZZ**(2*self._ambient_dimension()),
self.base_field(), check=False)
self.__ambient_variety = A
return A
def ambient_morphism(self):
"""
Return the morphism from self to the ambient variety. This is
injective if self is naturally a subvariety of the ambient product
Jacobian.
OUTPUT: morphism
The output is cached.
EXAMPLES: We compute the ambient structure morphism for an abelian
subvariety of `J_0(33)`::
sage: A,B,C = J0(33)
sage: phi = A.ambient_morphism()
sage: phi.domain()
Simple abelian subvariety 11a(1,33) of dimension 1 of J0(33)
sage: phi.codomain()
Abelian variety J0(33) of dimension 3
sage: phi.matrix()
[ 1 1 -2 0 2 -1]
[ 0 3 -2 -1 2 0]
phi is of course injective
::
sage: phi.kernel()
(Finite subgroup with invariants [] over QQ of Simple abelian subvariety 11a(1,33) of dimension 1 of J0(33),
Abelian subvariety of dimension 0 of J0(33))
This is the same as the basis matrix for the lattice corresponding
to self::
sage: A.lattice()
Free module of degree 6 and rank 2 over Integer Ring
Echelon basis matrix:
[ 1 1 -2 0 2 -1]
[ 0 3 -2 -1 2 0]
We compute a non-injective map to an ambient space::
sage: Q,pi = J0(33)/A
sage: phi = Q.ambient_morphism()
sage: phi.matrix()
[ 1 4 1 9 -1 -1]
[ 0 15 0 0 30 -75]
[ 0 0 5 10 -5 15]
[ 0 0 0 15 -15 30]
sage: phi.kernel()[0]
Finite subgroup with invariants [5, 15, 15] over QQ of Abelian variety factor of dimension 2 of J0(33)
"""
try:
return self.__ambient_morphism
except AttributeError:
matrix,_ = self.lattice().basis_matrix()._clear_denom()
phi = Morphism(self.Hom(self.ambient_variety()), matrix)
self.__ambient_morphism = phi
return phi
def is_ambient(self):
"""
Return True if self equals the ambient product Jacobian.
OUTPUT: bool
EXAMPLES::
sage: A,B,C = J0(33)
sage: A.is_ambient()
False
sage: J0(33).is_ambient()
True
sage: (A+B).is_ambient()
False
sage: (A+B+C).is_ambient()
True
"""
try:
return self.__is_ambient
except AttributeError:
pass
L = self.lattice()
self.__is_ambient = (L == ZZ**L.degree())
return self.__is_ambient
def dimension(self):
"""
Return the dimension of this abelian variety.
EXAMPLES::
sage: A = J0(23)
sage: A.dimension()
2
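For a product, the dimensions of the factors add::
sage: (J0(11) * J0(33)).dimension()
4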
"""
return self.lattice().rank() // 2
def rank(self):
"""
Return the rank of the underlying lattice of self.
EXAMPLES::
sage: J = J0(33)
sage: J.rank()
6
sage: J[1]
Simple abelian subvariety 11a(3,33) of dimension 1 of J0(33)
sage: (J[1] * J[1]).rank()
4
"""
return self.lattice().rank()
def degree(self):
"""
Return the degree of this abelian variety, which is the dimension
of the ambient Jacobian product.
EXAMPLES::
sage: A = J0(23)
sage: A.degree()
2
"""
return self._ambient_dimension()
def endomorphism_ring(self, category=None):
"""
Return the endomorphism ring of self.
OUTPUT: the endomorphism ring of self
EXAMPLES: We compute a few endomorphism rings::
sage: J0(11).endomorphism_ring()
Endomorphism ring of Abelian variety J0(11) of dimension 1
sage: J0(37).endomorphism_ring()
Endomorphism ring of Abelian variety J0(37) of dimension 2
sage: J0(33)[2].endomorphism_ring()
Endomorphism ring of Simple abelian subvariety 33a(1,33) of dimension 1 of J0(33)
No real computation is done::
sage: J1(123456).endomorphism_ring()
Endomorphism ring of Abelian variety J1(123456) of dimension 423185857
"""
try:
return self.__endomorphism_ring
except AttributeError:
pass
self.__endomorphism_ring = homspace.EndomorphismSubring(self, category=category)
return self.__endomorphism_ring
def sturm_bound(self):
r"""
Return a bound `B` such that all Hecke operators
`T_n` for `n\leq B` generate the Hecke algebra.
OUTPUT: integer
EXAMPLES::
sage: J0(11).sturm_bound()
2
sage: J0(33).sturm_bound()
8
sage: J1(17).sturm_bound()
48
sage: J1(123456).sturm_bound()
1693483008
sage: JH(37,[2,3]).sturm_bound()
7
sage: J1(37).sturm_bound()
228
"""
try:
return self.__sturm_bound
except AttributeError:
B = max([G.sturm_bound(2) for G in self.groups()])
self.__sturm_bound = B
return B
def is_hecke_stable(self):
"""
Return True if self is stable under the Hecke operators of its
ambient Jacobian.
OUTPUT: bool
EXAMPLES::
sage: J0(11).is_hecke_stable()
True
sage: J0(33)[2].is_hecke_stable()
True
sage: J0(33)[0].is_hecke_stable()
False
sage: (J0(33)[0] + J0(33)[1]).is_hecke_stable()
True
"""
try:
return self._is_hecke_stable
except AttributeError:
pass
b = max([ m.sturm_bound() for m in self._ambient_modular_symbols_spaces() ])
J = self.ambient_variety()
L = self.lattice()
B = self.lattice().basis()
for n in prime_range(1,b+1):
Tn_matrix = J.hecke_operator(n).matrix()
for v in B:
if not (v*Tn_matrix in L):
self._is_hecke_stable = False
return False
self._is_hecke_stable = True
return True
def is_subvariety(self, other):
"""
Return True if self is a subvariety of other as they sit in a
common ambient modular Jacobian. In particular, this function will
only return True if self and other have exactly the same ambient
Jacobians.
EXAMPLES::
sage: J = J0(37); J
Abelian variety J0(37) of dimension 2
sage: A = J[0]; A
Simple abelian subvariety 37a(1,37) of dimension 1 of J0(37)
sage: A.is_subvariety(A)
True
sage: A.is_subvariety(J)
True
"""
if not is_ModularAbelianVariety(other):
return False
if self is other:
return True
if self.groups() != other.groups():
return False
L = self.lattice()
M = other.lattice()
# self is an abelian subvariety of other if and only if
# 1. L is a subset of M (so the abelian subvarieties of
# the ambient J are equal), and
# 2. L is relatively saturated in M, i.e., M/L is
# torsion free.
if not L.is_submodule(M):
return False
# To determine if L is relatively saturated we compute the
# intersection of M with (L tensor Q) and see if that equals
# L.
return L.change_ring(QQ).intersection(M) == L
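# A toy illustration (hypothetical session) of the relative-saturation
# test above: L = 2*ZZ^2 is a submodule of M = ZZ^2, but M/L is torsion,
# so L would not define an abelian subvariety of the variety cut out by M.
#
#   sage: M = ZZ^2; L = M.span([[2, 0], [0, 2]])
#   sage: L.is_submodule(M)
#   True
#   sage: L.change_ring(QQ).intersection(M) == L
#   False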
def change_ring(self, R):
"""
Change the base ring of this modular abelian variety.
EXAMPLES::
sage: A = J0(23)
sage: A.change_ring(QQ)
Abelian variety J0(23) of dimension 2
"""
return ModularAbelianVariety(self.groups(), self.lattice(), R, check=False)
def level(self):
"""
Return the level of this modular abelian variety, which is an
integer N (usually minimal) such that this modular abelian variety
is a quotient of `J_1(N)`. In the case that the ambient
variety of self is a product of Jacobians, return the LCM of their
levels.
EXAMPLES::
sage: J1(5077).level()
5077
sage: JH(389,[4]).level()
389
sage: (J0(11)*J0(17)).level()
187
"""
try:
return self.__level
except AttributeError:
self.__level = LCM([G.level() for G in self.groups()])
return self.__level
def newform_level(self, none_if_not_known=False):
"""
Write self as a product (up to isogeny) of newform abelian
varieties `A_f`. Then this function returns the least
common multiple of the levels of the newforms `f`, along
with the corresponding group or list of groups (the groups do not
appear with multiplicity).
INPUT:
- ``none_if_not_known`` - (default: False) if True,
return None instead of attempting to compute the newform level, if
it isn't already known. This None result is not cached.
OUTPUT: an integer, together with a group or a list of distinct groups
EXAMPLES::
sage: J0(33)[0].newform_level()
(11, Congruence Subgroup Gamma0(33))
sage: J0(33)[0].newform_level(none_if_not_known=True)
(11, Congruence Subgroup Gamma0(33))
Here there are multiple groups since there are in fact multiple
newforms::
sage: (J0(11) * J1(13)).newform_level()
(143, [Congruence Subgroup Gamma0(11), Congruence Subgroup Gamma1(13)])
"""
try:
return self.__newform_level
except AttributeError:
if none_if_not_known:
return None
N = [A.newform_level() for A in self.decomposition()]
level = LCM([z[0] for z in N])
groups = sorted(set([z[1] for z in N]))
if len(groups) == 1:
groups = groups[0]
self.__newform_level = level, groups
return self.__newform_level
def zero_subvariety(self):
"""
Return the zero subvariety of self.
EXAMPLES::
sage: J = J0(37)
sage: J.zero_subvariety()
Simple abelian subvariety of dimension 0 of J0(37)
sage: J.zero_subvariety().level()
37
sage: J.zero_subvariety().newform_level()
(1, [])
"""
try:
return self.__zero_subvariety
except AttributeError:
lattice = (ZZ**(2*self.degree())).zero_submodule()
A = ModularAbelianVariety(self.groups(), lattice, self.base_field(),
is_simple=True, check=False)
self.__zero_subvariety = A
return A
###############################################################################
# Properties of the ambient product of Jacobians
###############################################################################
def _ambient_repr(self):
"""
OUTPUT: string
EXAMPLES::
sage: (J0(33)*J1(11))._ambient_repr()
'J0(33) x J1(11)'
"""
v = []
for G in self.groups():
if is_Gamma0(G):
v.append('J0(%s)'%G.level())
elif is_Gamma1(G):
v.append('J1(%s)'%G.level())
elif is_GammaH(G):
v.append('JH(%s,%s)'%(G.level(), G._generators_for_H()))
return ' x '.join(v)
def _ambient_latex_repr(self):
"""
Return Latex representation of the ambient product.
OUTPUT: string
EXAMPLES::
sage: (J0(11) * J0(33))._ambient_latex_repr()
'J_0(11) \\times J_0(33)'
"""
v = []
for G in self.groups():
if is_Gamma0(G):
v.append('J_0(%s)'%G.level())
elif is_Gamma1(G):
v.append('J_1(%s)'%G.level())
elif is_GammaH(G):
v.append('J_H(%s,%s)'%(G.level(), G._generators_for_H()))
return ' \\times '.join(v)
def _ambient_lattice(self):
"""
Return free lattice of rank twice the degree of self. This is the
lattice corresponding to the ambient product Jacobian.
OUTPUT: lattice
EXAMPLES: We compute the ambient lattice of a product::
sage: (J0(33)*J1(11))._ambient_lattice()
Ambient free module of rank 8 over the principal ideal domain Integer Ring
We compute the ambient lattice of an abelian subvariety of
`J_0(33)`, which is the same as the ambient lattice of
`J_0(33)` itself::
sage: A = J0(33)[0]; A._ambient_lattice()
Ambient free module of rank 6 over the principal ideal domain Integer Ring
sage: J0(33)._ambient_lattice()
Ambient free module of rank 6 over the principal ideal domain Integer Ring
"""
try:
return self.__ambient_lattice
except AttributeError:
self.__ambient_lattice = ZZ**(2*self.degree())
return self.__ambient_lattice
def _ambient_modular_symbols_spaces(self):
"""
Return a tuple of the ambient cuspidal modular symbols spaces that
make up the Jacobian product that contains self.
OUTPUT: tuple of cuspidal modular symbols spaces
EXAMPLES::
sage: (J0(11) * J0(33))._ambient_modular_symbols_spaces()
(Modular Symbols subspace of dimension 2 of Modular Symbols space of dimension 3 for Gamma_0(11) of weight 2 with sign 0 over Rational Field,
Modular Symbols subspace of dimension 6 of Modular Symbols space of dimension 9 for Gamma_0(33) of weight 2 with sign 0 over Rational Field)
sage: (J0(11) * J0(33)[0])._ambient_modular_symbols_spaces()
(Modular Symbols subspace of dimension 2 of Modular Symbols space of dimension 3 for Gamma_0(11) of weight 2 with sign 0 over Rational Field,
Modular Symbols subspace of dimension 6 of Modular Symbols space of dimension 9 for Gamma_0(33) of weight 2 with sign 0 over Rational Field)
"""
if not self.is_ambient():
return self.ambient_variety()._ambient_modular_symbols_spaces()
try:
return self.__ambient_modular_symbols_spaces
except AttributeError:
X = tuple([ModularSymbols(G).cuspidal_subspace() for G in self.groups()])
self.__ambient_modular_symbols_spaces = X
return X
def _ambient_modular_symbols_abvars(self):
"""
Return a tuple of the ambient modular symbols abelian varieties
that make up the Jacobian product that contains self.
OUTPUT: tuple of modular symbols abelian varieties
EXAMPLES::
sage: (J0(11) * J0(33))._ambient_modular_symbols_abvars()
(Abelian variety J0(11) of dimension 1, Abelian variety J0(33) of dimension 3)
"""
if not self.is_ambient():
return self.ambient_variety()._ambient_modular_symbols_abvars()
try:
return self.__ambient_modular_symbols_abvars
except AttributeError:
X = tuple([ModularAbelianVariety_modsym(M) for M in self._ambient_modular_symbols_spaces()])
self.__ambient_modular_symbols_abvars = X
return X
def _ambient_dimension(self):
"""
Return the dimension of the ambient Jacobian product.
EXAMPLES::
sage: A = J0(37) * J1(13); A
Abelian variety J0(37) x J1(13) of dimension 4
sage: A._ambient_dimension()
4
sage: B = A[0]; B
Simple abelian subvariety 13aG1(1,13) of dimension 2 of J0(37) x J1(13)
sage: B._ambient_dimension()
4
This example is fast because it implicitly calls
_ambient_dimension.
::
sage: J0(902834082394)
Abelian variety J0(902834082394) of dimension 113064825881
"""
try:
return self.__ambient_dimension
except AttributeError:
d = sum([G.dimension_cusp_forms(2) for G in self.groups()], Integer(0))
self.__ambient_dimension = d
return d
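# The sum above adds up genus contributions: e.g. dim S_2(Gamma_0(37)) = 2
# and dim S_2(Gamma_1(13)) = 2, giving the ambient dimension 4 in the
# docstring example.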
def _ambient_hecke_matrix_on_modular_symbols(self, n):
r"""
Return block direct sum of the matrix of the Hecke operator
`T_n` acting on each of the ambient modular symbols
spaces.
INPUT:
- ``n`` - an integer `\geq 1`.
OUTPUT: a matrix
EXAMPLES::
sage: (J0(11) * J1(13))._ambient_hecke_matrix_on_modular_symbols(2)
[-2 0 0 0 0 0]
[ 0 -2 0 0 0 0]
[ 0 0 -2 0 -1 1]
[ 0 0 1 -1 0 -1]
[ 0 0 1 1 -2 0]
[ 0 0 0 1 -1 -1]
"""
if not self.is_ambient():
return self.ambient_variety()._ambient_hecke_matrix_on_modular_symbols(n)
try:
return self.__ambient_hecke_matrix_on_modular_symbols[n]
except AttributeError:
self.__ambient_hecke_matrix_on_modular_symbols = {}
except KeyError:
pass
M = self._ambient_modular_symbols_spaces()
if len(M) == 0:
return matrix(QQ,0)
T = M[0].hecke_matrix(n)
for i in range(1,len(M)):
T = T.block_sum(M[i].hecke_matrix(n))
self.__ambient_hecke_matrix_on_modular_symbols[n] = T
return T
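# block_sum assembles the direct sum diagonally, as in the 6 x 6 example
# in the docstring: a 2 x 2 block for J0(11) followed by a 4 x 4 block for
# J1(13), with zero blocks off the diagonal.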
###############################################################################
# Rational and Integral Homology
###############################################################################
def _rational_homology_space(self):
"""
Return the rational homology of this modular abelian variety.
EXAMPLES::
sage: J = J0(11)
sage: J._rational_homology_space()
Vector space of dimension 2 over Rational Field
The result is cached::
sage: J._rational_homology_space() is J._rational_homology_space()
True
"""
try:
return self.__rational_homology_space
except AttributeError:
HQ = self.rational_homology().free_module()
self.__rational_homology_space = HQ
return HQ
def homology(self, base_ring=ZZ):
"""
Return the homology of this modular abelian variety.
.. warning::
For efficiency reasons the basis of the integral homology
need not be the same as the basis for the rational
homology.
EXAMPLES::
sage: J0(389).homology(GF(7))
Homology with coefficients in Finite Field of size 7 of Abelian variety J0(389) of dimension 32
sage: J0(389).homology(QQ)
Rational Homology of Abelian variety J0(389) of dimension 32
sage: J0(389).homology(ZZ)
Integral Homology of Abelian variety J0(389) of dimension 32
"""
try:
return self._homology[base_ring]
except AttributeError:
self._homology = {}
except KeyError:
pass
if base_ring == ZZ:
H = homology.IntegralHomology(self)
elif base_ring == QQ:
H = homology.RationalHomology(self)
else:
H = homology.Homology_over_base(self, base_ring)
self._homology[base_ring] = H
return H
def integral_homology(self):
"""
Return the integral homology of this modular abelian variety.
EXAMPLES::
sage: H = J0(43).integral_homology(); H
Integral Homology of Abelian variety J0(43) of dimension 3
sage: H.rank()
6
sage: H = J1(17).integral_homology(); H
Integral Homology of Abelian variety J1(17) of dimension 5
sage: H.rank()
10
If you just ask for the rank of the homology, no serious
calculations are done, so the following is fast::
sage: H = J0(50000).integral_homology(); H
Integral Homology of Abelian variety J0(50000) of dimension 7351
sage: H.rank()
14702
A product::
sage: H = (J0(11) * J1(13)).integral_homology()
sage: H.hecke_operator(2)
Hecke operator T_2 on Integral Homology of Abelian variety J0(11) x J1(13) of dimension 3
sage: H.hecke_operator(2).matrix()
[-2 0 0 0 0 0]
[ 0 -2 0 0 0 0]
[ 0 0 -2 0 -1 1]
[ 0 0 1 -1 0 -1]
[ 0 0 1 1 -2 0]
[ 0 0 0 1 -1 -1]
"""
return self.homology(ZZ)
def rational_homology(self):
"""
Return the rational homology of this modular abelian variety.
EXAMPLES::
sage: H = J0(37).rational_homology(); H
Rational Homology of Abelian variety J0(37) of dimension 2
sage: H.rank()
4
sage: H.base_ring()
Rational Field
sage: H = J1(17).rational_homology(); H
Rational Homology of Abelian variety J1(17) of dimension 5
sage: H.rank()
10
sage: H.base_ring()
Rational Field
"""
return self.homology(QQ)
###############################################################################
# L-series
###############################################################################
def lseries(self):
"""
Return the complex `L`-series of this modular abelian
variety.
EXAMPLES::
sage: A = J0(37)
sage: A.lseries()
Complex L-series attached to Abelian variety J0(37) of dimension 2
"""
try:
return self.__lseries
except AttributeError:
pass
self.__lseries = lseries.Lseries_complex(self)
return self.__lseries
def padic_lseries(self, p):
"""
Return the `p`-adic `L`-series of this modular
abelian variety.
EXAMPLES::
sage: A = J0(37)
sage: A.padic_lseries(7)
7-adic L-series attached to Abelian variety J0(37) of dimension 2
"""
p = int(p)
try:
return self.__lseries_padic[p]
except AttributeError:
self.__lseries_padic = {}
except KeyError:
pass
self.__lseries_padic[p] = lseries.Lseries_padic(self, p)
return self.__lseries_padic[p]
###############################################################################
# Hecke Operators
###############################################################################
def hecke_operator(self, n):
"""
Return the `n^{th}` Hecke operator on the modular abelian
variety, if this makes sense. Otherwise raise a
ValueError.
EXAMPLES: We compute `T_2` on `J_0(37)`.
::
sage: t2 = J0(37).hecke_operator(2); t2
Hecke operator T_2 on Abelian variety J0(37) of dimension 2
sage: t2.charpoly().factor()
x * (x + 2)
sage: t2.index()
2
Note that there is no matrix associated to Hecke operators on
modular abelian varieties. For a matrix, instead consider, e.g.,
the Hecke operator on integral or rational homology.
::
sage: t2.action_on_homology().matrix()
[-1 1 1 -1]
[ 1 -1 1 0]
[ 0 0 -2 1]
[ 0 0 0 0]
"""
try:
return self._hecke_operator[n]
except AttributeError:
self._hecke_operator = {}
except KeyError:
pass
Tn = HeckeOperator(self, n)
self._hecke_operator[n] = Tn
return Tn
def hecke_polynomial(self, n, var='x'):
r"""
Return the characteristic polynomial of the `n^{th}` Hecke
operator `T_n` acting on self. Raises an ArithmeticError
if self is not Hecke equivariant.
INPUT:
- ``n`` - integer `\geq 1`
- ``var`` - string (default: 'x'); valid variable
name
EXAMPLES::
sage: J0(33).hecke_polynomial(2)
x^3 + 3*x^2 - 4
sage: f = J0(33).hecke_polynomial(2, 'y'); f
y^3 + 3*y^2 - 4
sage: f.parent()
Univariate Polynomial Ring in y over Rational Field
sage: J0(33)[2].hecke_polynomial(3)
x + 1
sage: J0(33)[0].hecke_polynomial(5)
x - 1
sage: J0(33)[0].hecke_polynomial(11)
x - 1
sage: J0(33)[0].hecke_polynomial(3)
Traceback (most recent call last):
...
ArithmeticError: subspace is not invariant under matrix
"""
n = Integer(n)
if n <= 0:
raise ValueError("n must be a positive integer")
key = (n,var)
try:
return self.__hecke_polynomial[key]
except AttributeError:
self.__hecke_polynomial = {}
except KeyError:
pass
f = self._compute_hecke_polynomial(n, var=var)
self.__hecke_polynomial[key] = f
return f
def _compute_hecke_polynomial(self, n, var='x'):
"""
Return the Hecke polynomial of index `n` in terms of the
given variable.
INPUT:
- ``n`` - positive integer
- ``var`` - string (default: 'x')
EXAMPLES::
sage: A = J0(33)*J0(11)
sage: A._compute_hecke_polynomial(2)
x^4 + 5*x^3 + 6*x^2 - 4*x - 8
"""
return self.hecke_operator(n).charpoly(var=var)
def _integral_hecke_matrix(self, n):
"""
Return the matrix of the Hecke operator `T_n` acting on
the integral homology of this modular abelian variety, if the
modular abelian variety is stable under `T_n`. Otherwise,
raise an ArithmeticError.
EXAMPLES::
sage: A = J0(23)
sage: t = A._integral_hecke_matrix(2); t
[ 0 1 -1 0]
[ 0 1 -1 1]
[-1 2 -2 1]
[-1 1 0 -1]
sage: t.parent()
Full MatrixSpace of 4 by 4 dense matrices over Integer Ring
"""
A = self._ambient_hecke_matrix_on_modular_symbols(n)
return A.restrict(self.lattice())
def _rational_hecke_matrix(self, n):
r"""
Return the matrix of the Hecke operator `T_n` acting on
the rational homology `H_1(A,\QQ)` of this modular
abelian variety, if this action is defined. Otherwise, raise an
ArithmeticError.
EXAMPLES::
sage: A = J0(23)
sage: t = A._rational_hecke_matrix(2); t
[ 0 1 -1 0]
[ 0 1 -1 1]
[-1 2 -2 1]
[-1 1 0 -1]
sage: t.parent()
Full MatrixSpace of 4 by 4 dense matrices over Rational Field
"""
return self._integral_hecke_matrix(n)
###############################################################################
# Subgroups
###############################################################################
def qbar_torsion_subgroup(self):
r"""
Return the group of all points of finite order in the algebraic
closure of this abelian variety.
EXAMPLES::
sage: T = J0(33).qbar_torsion_subgroup(); T
Group of all torsion points in QQbar on Abelian variety J0(33) of dimension 3
The field of definition is the same as the base field of the
abelian variety.
::
sage: T.field_of_definition()
Rational Field
On the other hand, T is a module over `\ZZ`.
::
sage: T.base_ring()
Integer Ring
"""
try:
return self.__qbar_torsion_subgroup
except AttributeError:
G = QQbarTorsionSubgroup(self)
self.__qbar_torsion_subgroup = G
return G
def rational_torsion_subgroup(self):
"""
Return the maximal torsion subgroup of self defined over QQ.
EXAMPLES::
sage: J = J0(33)
sage: A = J.new_subvariety()
sage: A
Abelian subvariety of dimension 1 of J0(33)
sage: t = A.rational_torsion_subgroup()
sage: t.multiple_of_order()
4
sage: t.divisor_of_order()
4
sage: t.order()
4
sage: t.gens()
[[(1/2, 0, 0, -1/2, 0, 0)], [(0, 0, 1/2, 0, 1/2, -1/2)]]
sage: t
Torsion subgroup of Abelian subvariety of dimension 1 of J0(33)
"""
try:
return self.__rational_torsion_subgroup
except AttributeError:
T = RationalTorsionSubgroup(self)
self.__rational_torsion_subgroup = T
return T
def cuspidal_subgroup(self):
"""
Return the cuspidal subgroup of this modular abelian variety. This
is the subgroup generated by rational cusps.
EXAMPLES::
sage: J = J0(54)
sage: C = J.cuspidal_subgroup()
sage: C.gens()
[[(1/3, 0, 0, 0, 0, 1/3, 0, 2/3)], [(0, 1/3, 0, 0, 0, 2/3, 0, 1/3)], [(0, 0, 1/9, 1/9, 1/9, 1/9, 1/9, 2/9)], [(0, 0, 0, 1/3, 0, 1/3, 0, 0)], [(0, 0, 0, 0, 1/3, 1/3, 0, 1/3)], [(0, 0, 0, 0, 0, 0, 1/3, 2/3)]]
sage: C.invariants()
[3, 3, 3, 3, 3, 9]
sage: J1(13).cuspidal_subgroup()
Finite subgroup with invariants [19, 19] over QQ of Abelian variety J1(13) of dimension 2
sage: A = J0(33)[0]
sage: A.cuspidal_subgroup()
Finite subgroup with invariants [5] over QQ of Simple abelian subvariety 11a(1,33) of dimension 1 of J0(33)
"""
try:
return self._cuspidal_subgroup
except AttributeError:
if not self.is_subvariety_of_ambient_jacobian():
raise ValueError("self must be a subvariety of the ambient variety")
if self.is_ambient():
T = self._ambient_cuspidal_subgroup(rational_only=False)
else:
T = self.ambient_variety().cuspidal_subgroup().intersection(self)
self._cuspidal_subgroup = T
return T
def _ambient_cuspidal_subgroup(self, rational_only=False, rational_subgroup=False):
"""
EXAMPLES::
sage: (J1(13)*J0(11))._ambient_cuspidal_subgroup()
Finite subgroup with invariants [19, 95] over QQ of Abelian variety J1(13) x J0(11) of dimension 3
sage: (J0(33))._ambient_cuspidal_subgroup()
Finite subgroup with invariants [10, 10] over QQ of Abelian variety J0(33) of dimension 3
sage: (J0(33)*J0(33))._ambient_cuspidal_subgroup()
Finite subgroup with invariants [10, 10, 10, 10] over QQ of Abelian variety J0(33) x J0(33) of dimension 6
"""
n = 2 * self.degree()
i = 0
lattice = (ZZ**n).zero_submodule()
if rational_subgroup:
CS = RationalCuspidalSubgroup
elif rational_only:
CS = RationalCuspSubgroup
else:
CS = CuspidalSubgroup
for J in self._ambient_modular_symbols_abvars():
L = CS(J).lattice().basis_matrix()
Z_left = matrix(QQ,L.nrows(),i)
Z_right = matrix(QQ,L.nrows(),n-i-L.ncols())
lattice += (Z_left.augment(L).augment(Z_right)).row_module(ZZ)
i += L.ncols()
return FiniteSubgroup_lattice(self, lattice, field_of_definition=self.base_field())
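# Sanity check against the docstrings above: J1(13) contributes cuspidal
# invariants [19, 19] and J0(11) contributes [5]; as abelian groups,
# Z/19 x Z/19 x Z/5 = Z/19 x Z/95, matching the invariants [19, 95].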
def shimura_subgroup(self):
r"""
Return the Shimura subgroup of this modular abelian variety. This is
the kernel of the natural map `J_0(N) \rightarrow J_1(N)`.
Here we compute the Shimura subgroup as the kernel of
`J_0(N) \rightarrow J_0(Np)`, where the map is the difference between the
two degeneracy maps.
EXAMPLES::
sage: J=J0(11)
sage: J.shimura_subgroup()
Finite subgroup with invariants [5] over QQ of Abelian variety J0(11) of dimension 1
sage: J=J0(17)
sage: G=J.cuspidal_subgroup(); G
Finite subgroup with invariants [4] over QQ of Abelian variety J0(17) of dimension 1
sage: S=J.shimura_subgroup(); S
Finite subgroup with invariants [4] over QQ of Abelian variety J0(17) of dimension 1
sage: G.intersection(S)
Finite subgroup with invariants [2] over QQ of Abelian variety J0(17) of dimension 1
sage: J=J0(33)
sage: A=J.decomposition()[0]
sage: A.shimura_subgroup()
Finite subgroup with invariants [5] over QQ of Simple abelian subvariety 11a(1,33) of dimension 1 of J0(33)
sage: J.shimura_subgroup()
Finite subgroup with invariants [10] over QQ of Abelian variety J0(33) of dimension 3
"""
N = self.level()
J = self.ambient_variety()
for p in prime_range(100):
if N % p != 0:
break
phi = J.degeneracy_map(N*p, 1)
phip = J.degeneracy_map(N*p, p)
SIG = (phi - phip).kernel()
assert SIG[1].dimension() == 0, "The intersection should have dimension 0"
return self.intersection(SIG[0])
def rational_cusp_subgroup(self):
r"""
Return the subgroup of this modular abelian variety generated by
rational cusps.
This is a subgroup of the group of rational points in the cuspidal
subgroup.
.. warning::
This is only currently implemented for
`\Gamma_0(N)`.
EXAMPLES::
sage: J = J0(54)
sage: CQ = J.rational_cusp_subgroup(); CQ
Finite subgroup with invariants [3, 3, 9] over QQ of Abelian variety J0(54) of dimension 4
sage: CQ.gens()
[[(1/3, 0, 0, 1/3, 2/3, 1/3, 0, 1/3)], [(0, 0, 1/9, 1/9, 7/9, 7/9, 1/9, 8/9)], [(0, 0, 0, 0, 0, 0, 1/3, 2/3)]]
sage: factor(CQ.order())
3^4
sage: CQ.invariants()
[3, 3, 9]
In this example the rational cuspidal subgroup and the cuspidal
subgroup differ by a lot.
::
sage: J = J0(49)
sage: J.cuspidal_subgroup()
Finite subgroup with invariants [2, 14] over QQ of Abelian variety J0(49) of dimension 1
sage: J.rational_cusp_subgroup()
Finite subgroup with invariants [2] over QQ of Abelian variety J0(49) of dimension 1
Note that computation of the rational cusp subgroup isn't
implemented for `\Gamma_1`.
::
sage: J = J1(13)
sage: J.cuspidal_subgroup()
Finite subgroup with invariants [19, 19] over QQ of Abelian variety J1(13) of dimension 2
sage: J.rational_cusp_subgroup()
Traceback (most recent call last):
...
NotImplementedError: computation of rational cusps only implemented in Gamma0 case.
"""
try:
return self._rational_cusp_subgroup
except AttributeError:
if not self.is_subvariety_of_ambient_jacobian():
raise ValueError("self must be a subvariety of the ambient variety")
if self.is_ambient():
T = self._ambient_cuspidal_subgroup(rational_only=True)
else:
T = self.ambient_variety().rational_cusp_subgroup().intersection(self)
self._rational_cusp_subgroup = T
return T
def rational_cuspidal_subgroup(self):
r"""
Return the rational subgroup of the cuspidal subgroup of this
modular abelian variety.
This is a subgroup of the group of rational points in the
cuspidal subgroup.
.. warning::
This is only currently implemented for
`\Gamma_0(N)`.
EXAMPLES::
sage: J = J0(54)
sage: CQ = J.rational_cuspidal_subgroup(); CQ
Finite subgroup with invariants [3, 3, 9] over QQ of Abelian variety J0(54) of dimension 4
sage: CQ.gens()
[[(1/3, 0, 0, 1/3, 2/3, 1/3, 0, 1/3)], [(0, 0, 1/9, 1/9, 7/9, 7/9, 1/9, 8/9)], [(0, 0, 0, 0, 0, 0, 1/3, 2/3)]]
sage: factor(CQ.order())
3^4
sage: CQ.invariants()
[3, 3, 9]
In this example the rational cuspidal subgroup and the cuspidal
subgroup differ by a lot.
::
sage: J = J0(49)
sage: J.cuspidal_subgroup()
Finite subgroup with invariants [2, 14] over QQ of Abelian variety J0(49) of dimension 1
sage: J.rational_cuspidal_subgroup()
Finite subgroup with invariants [2] over QQ of Abelian variety J0(49) of dimension 1
Note that computation of the rational cusp subgroup isn't
implemented for `\Gamma_1`.
::
sage: J = J1(13)
sage: J.cuspidal_subgroup()
Finite subgroup with invariants [19, 19] over QQ of Abelian variety J1(13) of dimension 2
sage: J.rational_cuspidal_subgroup()
Traceback (most recent call last):
...
NotImplementedError: only implemented when group is Gamma0
"""
try:
return self._rational_cuspidal_subgroup
except AttributeError:
if not self.is_subvariety_of_ambient_jacobian():
raise ValueError("self must be a subvariety of the ambient variety")
if self.is_ambient():
T = self._ambient_cuspidal_subgroup(rational_subgroup=True)
else:
T = self.ambient_variety().rational_cuspidal_subgroup().intersection(self)
self._rational_cuspidal_subgroup = T
return T
def zero_subgroup(self):
"""
Return the zero subgroup of this modular abelian variety, as a
finite group.
EXAMPLES::
sage: A =J0(54); G = A.zero_subgroup(); G
Finite subgroup with invariants [] over QQ of Abelian variety J0(54) of dimension 4
sage: G.is_subgroup(A)
True
"""
try:
return self.__zero_subgroup
except AttributeError:
G = FiniteSubgroup_lattice(self, self.lattice(), field_of_definition=QQ)
self.__zero_subgroup = G
return G
def finite_subgroup(self, X, field_of_definition=None, check=True):
"""
Return a finite subgroup of this modular abelian variety.
INPUT:
- ``X`` - list of elements of other finite subgroups
of this modular abelian variety or elements that coerce into the
rational homology (viewed as a rational vector space); also X could
be a finite subgroup itself that is contained in this abelian
variety.
- ``field_of_definition`` - (default: None) field
over which this group is defined. If None try to figure out the
best base field.
OUTPUT: a finite subgroup of a modular abelian variety
EXAMPLES::
sage: J = J0(11)
sage: J.finite_subgroup([[1/5,0], [0,1/3]])
Finite subgroup with invariants [15] over QQbar of Abelian variety J0(11) of dimension 1
::
sage: J = J0(33); C = J[0].cuspidal_subgroup(); C
Finite subgroup with invariants [5] over QQ of Simple abelian subvariety 11a(1,33) of dimension 1 of J0(33)
sage: J.finite_subgroup([[0,0,0,0,0,1/6]])
Finite subgroup with invariants [6] over QQbar of Abelian variety J0(33) of dimension 3
sage: J.finite_subgroup(C)
Finite subgroup with invariants [5] over QQ of Abelian variety J0(33) of dimension 3
"""
if isinstance(X, (list, tuple)):
X = self._ambient_lattice().span(X)
elif isinstance(X, FiniteSubgroup):
if field_of_definition is None:
field_of_definition = X.field_of_definition()
A = X.abelian_variety()
if A.groups() != self.groups():
raise ValueError("ambient product Jacobians must be equal")
if A == self:
X = X.lattice()
else:
if X.is_subgroup(self):
X = (X.lattice() + self.lattice()).intersection(self.vector_space())
else:
raise ValueError("X must be a subgroup of self.")
if field_of_definition is None:
field_of_definition = QQbar
return FiniteSubgroup_lattice(self, X, field_of_definition=field_of_definition, check=check)
def torsion_subgroup(self, n):
"""
Return the `n`-torsion subgroup `A[n]` of this modular abelian
variety `A`, i.e., the subgroup of elements of order dividing `n`.
EXAMPLES::
sage: J1(13).torsion_subgroup(19)
Finite subgroup with invariants [19, 19, 19, 19] over QQ of Abelian variety J1(13) of dimension 2
::
sage: A = J0(23)
sage: G = A.torsion_subgroup(5); G
Finite subgroup with invariants [5, 5, 5, 5] over QQ of Abelian variety J0(23) of dimension 2
sage: G.order()
625
sage: G.gens()
[[(1/5, 0, 0, 0)], [(0, 1/5, 0, 0)], [(0, 0, 1/5, 0)], [(0, 0, 0, 1/5)]]
sage: A = J0(23)
sage: A.torsion_subgroup(2).order()
16
"""
try:
return self.__torsion_subgroup[n]
except KeyError:
pass
except AttributeError:
self.__torsion_subgroup = {}
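# The n-torsion A[n] is ((1/n)L)/L, where L is the lattice defining A,
# so scaling the lattice by 1/n gives the desired finite subgroup.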
lattice = self.lattice().scale(1/Integer(n))
H = FiniteSubgroup_lattice(self, lattice, field_of_definition=self.base_field())
self.__torsion_subgroup[n] = H
return H
###############################################################################
# Decomposition
###############################################################################
def degen_t(self, none_if_not_known=False):
"""
If this abelian variety is obtained via decomposition then it gets
labeled with the newform label along with some information about
degeneracy maps. In particular, the label ends in a pair
`(t,N)`, where `N` is the ambient level and
`t` is an integer that divides the quotient of `N`
by the newform level. This function returns the tuple
`(t,N)`, or raises a ValueError if self isn't simple.
.. note::
It need not be the case that self is literally equal to the
image of the newform abelian variety under the `t^{th}`
degeneracy map. See the documentation for the label method
for more details.
INPUT:
- ``none_if_not_known`` - (default: False) - if
True, return None instead of attempting to compute the degen map's
`t`, if it isn't known. This None result is not cached.
OUTPUT: a pair (integer, integer)
EXAMPLES::
sage: D = J0(33).decomposition(); D
[
Simple abelian subvariety 11a(1,33) of dimension 1 of J0(33),
Simple abelian subvariety 11a(3,33) of dimension 1 of J0(33),
Simple abelian subvariety 33a(1,33) of dimension 1 of J0(33)
]
sage: D[0].degen_t()
(1, 33)
sage: D[1].degen_t()
(3, 33)
sage: D[2].degen_t()
(1, 33)
sage: J0(33).degen_t()
Traceback (most recent call last):
...
ValueError: self must be simple
"""
try:
return self.__degen_t
except AttributeError:
if none_if_not_known:
return None
elif self.dimension() > 0 and self.is_simple():
self.__degen_t = self.decomposition()[0].degen_t()
return self.__degen_t
raise ValueError("self must be simple")
def isogeny_number(self, none_if_not_known=False):
"""
Return the number (starting at 0) of the isogeny class of new
simple abelian varieties that self is in. If self is not simple,
raises a ValueError exception.
INPUT:
- ``none_if_not_known`` - bool (default: False); if
True then this function may return None instead of the isogeny
number, if it is not already known.
EXAMPLES: We test the none_if_not_known flag first::
sage: J0(33).isogeny_number(none_if_not_known=True) is None
True
Of course, `J_0(33)` is not simple, so this function
raises a ValueError::
sage: J0(33).isogeny_number()
Traceback (most recent call last):
...
ValueError: self must be simple
Each simple factor here has isogeny number 0, since each is the
first (and only) newform factor at its level.
::
sage: J0(33)[1].isogeny_number()
0
sage: J0(33)[2].isogeny_number()
0
Next consider `J_0(37)` where there are two distinct
newform factors::
sage: J0(37)[1].isogeny_number()
1
"""
try:
return self.__isogeny_number
except AttributeError:
if none_if_not_known:
return None
elif self.is_simple():
self.__isogeny_number = self.decomposition()[0].isogeny_number()
return self.__isogeny_number
else:
raise ValueError("self must be simple")
def is_simple(self, none_if_not_known=False):
"""
Return whether or not this modular abelian variety is simple, i.e.,
has no proper nonzero abelian subvarieties.
INPUT:
- ``none_if_not_known`` - bool (default: False); if
True then this function may return None instead of True or False if
we don't already know whether or not self is simple.
EXAMPLES::
sage: J0(5).is_simple(none_if_not_known=True) is None # this may fail if J0(5) comes up elsewhere...
True
sage: J0(33).is_simple()
False
sage: J0(33).is_simple(none_if_not_known=True)
False
sage: J0(33)[1].is_simple()
True
sage: J1(17).is_simple()
False
"""
try:
return self.__is_simple
except AttributeError:
if none_if_not_known:
return None
self.__is_simple = len(self.decomposition()) <= 1
return self.__is_simple
def decomposition(self, simple=True, bound=None):
"""
Return a sequence of abelian subvarieties of self that are all
simple, have finite intersection and sum to self.
INPUT:
- ``simple`` - bool (default: True); if True, all factors are
simple. If False, each factor returned is isogenous to a power of a
simple and the simples in each factor are distinct.
- ``bound`` - int (default: None) if given, only use
Hecke operators up to this bound when decomposing. This can give
wrong answers, so use with caution!
EXAMPLES::
sage: m = ModularSymbols(11).cuspidal_submodule()
sage: d1 = m.degeneracy_map(33,1).matrix(); d3=m.degeneracy_map(33,3).matrix()
sage: w = ModularSymbols(33).submodule((d1 + d3).image(), check=False)
sage: A = w.abelian_variety(); A
Abelian subvariety of dimension 1 of J0(33)
sage: D = A.decomposition(); D
[
Simple abelian subvariety 11a(3,33) of dimension 1 of J0(33)
]
sage: D[0] == A
True
sage: B = A + J0(33)[0]; B
Abelian subvariety of dimension 2 of J0(33)
sage: dd = B.decomposition(simple=False); dd
[
Abelian subvariety of dimension 2 of J0(33)
]
sage: dd[0] == B
True
sage: dd = B.decomposition(); dd
[
Simple abelian subvariety 11a(1,33) of dimension 1 of J0(33),
Simple abelian subvariety 11a(3,33) of dimension 1 of J0(33)
]
sage: sum(dd) == B
True
We decompose a product of two Jacobians::
sage: (J0(33) * J0(11)).decomposition()
[
Simple abelian subvariety 11a(1,11) of dimension 1 of J0(33) x J0(11),
Simple abelian subvariety 11a(1,33) of dimension 1 of J0(33) x J0(11),
Simple abelian subvariety 11a(3,33) of dimension 1 of J0(33) x J0(11),
Simple abelian subvariety 33a(1,33) of dimension 1 of J0(33) x J0(11)
]
"""
try:
return self.__decomposition[(simple, bound)]
except KeyError:
pass
except AttributeError:
self.__decomposition = {}
if self.is_ambient():
# Decompose each piece, then lift
if len(self.groups()) == 0:
D = []
elif len(self.groups()) == 1:
D = ModularAbelianVariety_modsym(ModularSymbols(self.groups()[0], sign=0).cuspidal_submodule()).decomposition(simple=simple, bound=bound)
else:
# Decompose each ambient modular symbols factor.
#X = [ModularAbelianVariety_modsym(ModularSymbols(G,sign=0).cuspidal_submodule()) for G in self.groups()]
from sage.modular.abvar.abvar_ambient_jacobian import ModAbVar_ambient_jacobian_class
X = [ModAbVar_ambient_jacobian_class(G) for G in self.groups()]
E = [A.decomposition(simple=simple, bound=bound) for A in X]
i = 0
n = 2*self.dimension()
# Now lift each factor of the decomposition to self.
G = self.groups()
D = []
K = self.base_field()
for C in E:
for B in C:
L = B.lattice().basis_matrix()
if simple:
is_simple = True
else:
is_simple = None
lattice = matrix(QQ,L.nrows(),i).augment(L).augment(matrix(QQ,L.nrows(),n-i-L.ncols())).row_module(ZZ)
D.append(ModularAbelianVariety(G, lattice, K, is_simple=is_simple, newform_level=B.newform_level(),
isogeny_number=B.isogeny_number(none_if_not_known=True),
number=B.degen_t(none_if_not_known=True)))
if len(C) > 0:
i += L.ncols()
elif not simple:
# In this case decompose the ambient space into powers of
# simple abelian varieties (i.e. with
# \code{simple=False}), and then intersect the lattice
# corresponding to self with each of these factors.
D = []
L = self.lattice()
groups = self.groups()
K = self.base_ring()
for X in self.ambient_variety().decomposition(simple=False):
lattice = L.intersection(X.vector_space())
if lattice.rank() > 0:
the_factor = ModularAbelianVariety(groups, lattice, K, is_simple=X.is_simple(none_if_not_known=True), newform_level=X.newform_level(), isogeny_number=X.isogeny_number(none_if_not_known=True), number=X.degen_t(none_if_not_known=True))
D.append(the_factor)
else:
# See the documentation for self._classify_ambient_factors
# in order to understand what we're doing here.
I_F, I_E, X = self._classify_ambient_factors(simple=simple, bound=bound)
Z_E = [X[i] for i in I_E]
Z_F = [X[i] for i in I_F]
F = sum(Z_F, self.zero_subvariety())
# Now self is isogenous to the sum of the factors in Z.
# We use this isogeny to obtain a product decomposition of
# self.
if F == self:
# The easy case -- it is already such a decomposition
D = Z_F
else:
# The hard case -- now we have to pull back the
# factorization
# Suppose $B$ is an abelian variety and there is a
# finite degree map $B\to J$, where $J$ is an ambient
# Jacobian. Suppose further that we find abelian
# subvarieties $E$ and $F$ of $J$ such that $E + F =
# J$, $E$ and $F$ have finite intersection, the
# composition $B \to J \to J/E$ is an isogeny, and we
# know an explicit decomposition of $F$. Then we can
# compute a decomposition of $B$ as follows. Let
# $L_E$ and $L_F$ be the lattices corresponding to $E$
# and $F$ inside of $L_J$. Compute a matrix $\Phi$
# representing the composition $L_B \to L_J \to L_F
# \otimes \QQ$, where the map $L_J$ to $L_F\otimes
# \QQ$ is projection onto the second factor in the
# decomposition of $L_J$ as $L_E + L_F$ (up to finite
# index). Finally, for each factor $A_i$ of $F$ with
# lattice $L_{A_i}$, compute the saturation $S_i$ of
# $\Phi^{-1}(L_{A_i})$. Then the $S_i$ define a
# decomposition of $B$.
E = sum(Z_E, self.zero_subvariety())
L_B = self.lattice()
L_E = E.lattice()
L_F = F.lattice()
decomp_matrix = L_E.basis_matrix().stack(L_F.basis_matrix())
# Now we compute explicitly the ZZ-linear map (over
# QQ) from L_B that is "projection onto L_F". This
# means write each element of a basis for L_B in terms
# of decomp_matrix, then take the bottom coordinates.
X = decomp_matrix.solve_left(L_B.basis_matrix())
# Now row of X gives each element of L_B as a linear
# combination of the rows of decomp_matrix. We
# project onto L_F by taking the right-most part of
# this matrix.
n = X.ncols()
proj = X.matrix_from_columns(range(n-L_F.rank(), n))
# Now proj is the matrix of projection that goes from
# L_B to L_F, wrt the basis of those spaces.
section = proj**(-1)
# Now section maps L_F to L_B (tensor QQ). Now we
# just take each factor of F, which corresponds to a
# submodule of L_F, and map it over to L_B tensor QQ
# and saturate.
D = []
groups = self.groups()
K = self.base_field()
for A in Z_F:
L_A = A.lattice()
M = L_F.coordinate_module(L_A).basis_matrix() * section
M, _ = M._clear_denom()
M = M.saturation()
M = M * L_B.basis_matrix()
lattice = M.row_module(ZZ)
the_factor = ModularAbelianVariety(groups, lattice, K, is_simple=True, newform_level=A.newform_level(),
isogeny_number=A.isogeny_number(), number=A.degen_t())
D.append(the_factor)
################
if isinstance(D, Sequence_generic):
S = D
else:
D.sort()
S = Sequence(D, immutable=True, cr=True, universe=self.category())
self.__decomposition[(simple, bound)] = S
return S
def _classify_ambient_factors(self, simple=True, bound=None):
r"""
This function implements the following algorithm, which produces
data useful in finding a decomposition or complement of self.
#. Suppose `A_1 + \cdots + A_n` is a simple decomposition
of the ambient space.
#. For each `i`, let
`B_i = A_1 + \cdots + A_i`.
#. For each `i`, compute the intersection `C_i` of
`B_i` and self.
#. For each `i`, if the dimension of `C_i` is
bigger than the dimension of `C_{i-1}`, put `i` in the "in" list;
otherwise put `i` in the "out" list.
Then one can show that self is isogenous to the sum of the
`A_i` with `i` in the "in" list. Moreover, the sum
of the `A_j` with `j` in the "out" list is a
complement of self in the ambient space.
INPUT:
- ``simple`` - bool (default: True)
- ``bound`` - integer (default: None); if given,
passed onto decomposition function
OUTPUT: the "in" list, the "out" list, and the list of simple (or
power of simple) factors of the ambient variety
EXAMPLES::
sage: d1 = J0(11).degeneracy_map(33, 1); d1
Degeneracy map from Abelian variety J0(11) of dimension 1 to Abelian variety J0(33) of dimension 3 defined by [1]
sage: d2 = J0(11).degeneracy_map(33, 3); d2
Degeneracy map from Abelian variety J0(11) of dimension 1 to Abelian variety J0(33) of dimension 3 defined by [3]
sage: A = (d1 + d2).image(); A
Abelian subvariety of dimension 1 of J0(33)
sage: A._classify_ambient_factors()
([1], [0, 2], [
Simple abelian subvariety 11a(1,33) of dimension 1 of J0(33),
Simple abelian subvariety 11a(3,33) of dimension 1 of J0(33),
Simple abelian subvariety 33a(1,33) of dimension 1 of J0(33)
])
"""
# Decompose an arbitrary abelian variety
amb = self.ambient_variety()
S = self.vector_space()
X = amb.decomposition(simple=simple, bound=bound)
IN = []; OUT = []
i = 0
V = 0
last_dimension = 0
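# Walk through the ambient factors, adding each one's homology to V.
# If doing so increases the dimension of the intersection with self,
# that factor contributes to self ("in"); otherwise it belongs to a
# complement of self ("out").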
for j in range(len(X)):
V += X[j].vector_space()
d = S.intersection(V).dimension()
if d > last_dimension:
IN.append(j)
last_dimension = d
else:
OUT.append(j)
return IN, OUT, X
def _isogeny_to_product_of_simples(self):
r"""
Given an abelian variety `A`, return an isogeny
`\phi: A \rightarrow B_1 \times \cdots \times B_n`, where
each `B_i` is simple. Note that this isogeny is not
unique.
EXAMPLES::
sage: J = J0(37) ; J.decomposition()
[
Simple abelian subvariety 37a(1,37) of dimension 1 of J0(37),
Simple abelian subvariety 37b(1,37) of dimension 1 of J0(37)
]
sage: phi = J._isogeny_to_product_of_simples() ; phi
Abelian variety morphism:
From: Abelian variety J0(37) of dimension 2
To: Abelian subvariety of dimension 2 of J0(37) x J0(37)
sage: J[0].intersection(J[1]) == phi.kernel()
True
::
sage: J = J0(22) * J0(37)
sage: J._isogeny_to_product_of_simples()
Abelian variety morphism:
From: Abelian variety J0(22) x J0(37) of dimension 4
To: Abelian subvariety of dimension 4 of J0(11) x J0(11) x J0(37) x J0(37)
"""
try:
return self._simple_product_isogeny
except AttributeError:
pass
D = self.decomposition()
dest = prod([d._isogeny_to_newform_abelian_variety().image() for d in D])
A = self.ambient_variety()
dim = sum([d.dimension() for d in D])
proj_ls = [ A.projection(factor) for factor in D ]
mat = matrix(ZZ, 2*self.dimension(), 2*dim)
ind = 0
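# Assemble the matrix of the isogeny block by block: the projection to
# each factor, restricted to self, occupies 2*dim(factor) columns.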
for i in range(len(D)):
factor = D[i]
proj = proj_ls[i]
mat.set_block(0, ind, proj.restrict_domain(self).matrix())
ind += 2*factor.dimension()
H = self.Hom(dest)
self._simple_product_isogeny = H(Morphism(H, mat))
return self._simple_product_isogeny
def _isogeny_to_product_of_powers(self):
r"""
Given an abelian variety `A`, return an isogeny
`\phi: A \rightarrow B_1 \times \cdots \times B_n`, where
each `B_i` is a power of a simple abelian variety. These
factors will be exactly those returned by
self.decomposition(simple=False). Note that this isogeny is not
unique.
EXAMPLES::
sage: J = J0(33) ; D = J.decomposition(simple=False) ; len(D)
2
sage: phi = J._isogeny_to_product_of_powers() ; phi
Abelian variety morphism:
From: Abelian variety J0(33) of dimension 3
To: Abelian subvariety of dimension 3 of J0(33) x J0(33)
::
sage: J = J0(22) * J0(37)
sage: J._isogeny_to_product_of_powers()
Abelian variety morphism:
From: Abelian variety J0(22) x J0(37) of dimension 4
To: Abelian subvariety of dimension 4 of J0(22) x J0(37) x J0(22) x J0(37) x J0(22) x J0(37)
"""
try:
return self._simple_power_product_isogeny
except AttributeError:
pass
D = self.decomposition(simple=False)
A = self.ambient_variety()
proj_ls = [ A.projection(factor) for factor in D ]
dest = prod([phi.image() for phi in proj_ls])
dim = sum([d.dimension() for d in D])
mat = matrix(ZZ, 2*self.dimension(), 2*dim)
ind = 0
for i in range(len(D)):
factor = D[i]
proj = proj_ls[i]
mat.set_block(0, ind, proj.restrict_domain(self).matrix())
ind += 2*factor.dimension()
H = self.Hom(dest)
self._simple_power_product_isogeny = H(Morphism(H, mat))
return self._simple_power_product_isogeny
def complement(self, A=None):
"""
Return a complement of this abelian variety.
INPUT:
- ``A`` - (default: None); if given, A must be an
abelian variety that contains self, in which case the complement of
self is taken inside A. Otherwise the complement is taken in the
ambient product Jacobian.
OUTPUT: abelian variety
EXAMPLES::
sage: a,b,c = J0(33)
sage: (a+b).complement()
Simple abelian subvariety 33a(1,33) of dimension 1 of J0(33)
sage: (a+b).complement() == c
True
sage: a.complement(a+b)
Abelian subvariety of dimension 1 of J0(33)
"""
try:
C = self.__complement
except AttributeError:
pass
if self.dimension() == 0:
if A is None:
C = self.ambient_variety()
else:
C = A
elif A is not None and self.dimension() == A.dimension():
if not self.is_subvariety(A):
raise ValueError("self must be a subvariety of A")
C = self.zero_subvariety()
else:
_, factors, X = self._classify_ambient_factors()
D = [X[i] for i in factors]
C = sum(D)
if C:
self.__complement = C
if A is not None:
C = C.intersection(A)[1]
else:
C = self.zero_subvariety()
return C
def dual(self):
r"""
Return the dual of this abelian variety.
OUTPUT:
- dual abelian variety
- morphism from self to dual
- covering morphism from J to dual
.. warning::
This is currently only implemented when self is an abelian
subvariety of the ambient Jacobian product, and the
complement of self in the ambient product Jacobian share no
common factors. A more general implementation will require
implementing computation of the intersection pairing on
integral homology and the resulting Weil pairing on
torsion.
EXAMPLES: We compute the dual of the elliptic curve newform abelian
variety of level `33`, and find the kernel of the modular
map, which has structure `(\ZZ/3)^2`.
::
sage: A,B,C = J0(33)
sage: C
Simple abelian subvariety 33a(1,33) of dimension 1 of J0(33)
sage: Cd, f, pi = C.dual()
sage: f.matrix()
[3 0]
[0 3]
sage: f.kernel()[0]
Finite subgroup with invariants [3, 3] over QQ of Simple abelian subvariety 33a(1,33) of dimension 1 of J0(33)
By a theorem the modular degree must thus be `3`::
sage: E = EllipticCurve('33a')
sage: E.modular_degree()
3
Next we compute the dual of a `2`-dimensional new simple
abelian subvariety of `J_0(43)`.
::
sage: A = AbelianVariety('43b'); A
Newform abelian subvariety 43b of dimension 2 of J0(43)
sage: Ad, f, pi = A.dual()
The kernel shows that the modular degree is `2`::
sage: f.kernel()[0]
Finite subgroup with invariants [2, 2] over QQ of Newform abelian subvariety 43b of dimension 2 of J0(43)
Unfortunately, the dual is not implemented in general::
sage: A = J0(22)[0]; A
Simple abelian subvariety 11a(1,22) of dimension 1 of J0(22)
sage: A.dual()
Traceback (most recent call last):
...
NotImplementedError: dual not implemented unless complement shares no simple factors with self.
"""
try:
return self.__dual
except AttributeError:
if not self.is_subvariety_of_ambient_jacobian():
raise NotImplementedError("dual not implemented unless abelian variety is a subvariety of the ambient Jacobian product")
if not self._complement_shares_no_factors_with_same_label():
raise NotImplementedError("dual not implemented unless complement shares no simple factors with self.")
C = self.complement()
Q, phi = self.ambient_variety().quotient(C)
psi = self.ambient_morphism()
self.__dual = Q, phi*psi, phi
return self.__dual
def _factors_with_same_label(self, other):
"""
Given two modular abelian varieties self and other, this function
returns a list of simple abelian subvarieties appearing in the
decomposition of self that have the same newform labels. Each
simple factor with a given newform label appears at most once.
INPUT:
- ``other`` - abelian variety
OUTPUT: list of simple abelian varieties
EXAMPLES::
sage: D = J0(33).decomposition(); D
[
Simple abelian subvariety 11a(1,33) of dimension 1 of J0(33),
Simple abelian subvariety 11a(3,33) of dimension 1 of J0(33),
Simple abelian subvariety 33a(1,33) of dimension 1 of J0(33)
]
sage: D[0]._factors_with_same_label(D[1])
[Simple abelian subvariety 11a(1,33) of dimension 1 of J0(33)]
sage: D[0]._factors_with_same_label(D[2])
[]
sage: (D[0]+D[1])._factors_with_same_label(D[1] + D[2])
[Simple abelian subvariety 11a(1,33) of dimension 1 of J0(33)]
This illustrates that the multiplicities in the returned list are
1::
sage: (D[0]+D[1])._factors_with_same_label(J0(33))
[Simple abelian subvariety 11a(1,33) of dimension 1 of J0(33)]
This illustrates that the ambient product Jacobians do not have to
be the same::
sage: (D[0]+D[1])._factors_with_same_label(J0(22))
[Simple abelian subvariety 11a(1,33) of dimension 1 of J0(33)]
This illustrates that the actual factor labels are relevant, not
just the isogeny class.
::
sage: (D[0]+D[1])._factors_with_same_label(J1(11))
[]
sage: J1(11)[0].newform_label()
'11aG1'
"""
if not isinstance(other, ModularAbelianVariety_abstract):
raise TypeError("other must be an abelian variety")
D = self.decomposition()
C = set([A.newform_label() for A in other.decomposition()])
Z = []
for X in D:
lbl = X.newform_label()
if lbl in C:
Z.append(X)
C.remove(lbl)
Z.sort()
return Z
def _complement_shares_no_factors_with_same_label(self):
"""
Return True if no simple factor of self has the same newform_label
as any factor in a Poincare complement of self in the ambient
product Jacobian.
EXAMPLES: `J_0(37)` is made up of two non-isogenous
elliptic curves::
sage: J0(37)[0]._complement_shares_no_factors_with_same_label()
True
`J_0(33)` decomposes as a product of two isogenous
elliptic curves with a third nonisogenous curve::
sage: D = J0(33).decomposition(); D
[
Simple abelian subvariety 11a(1,33) of dimension 1 of J0(33),
Simple abelian subvariety 11a(3,33) of dimension 1 of J0(33),
Simple abelian subvariety 33a(1,33) of dimension 1 of J0(33)
]
sage: D[0]._complement_shares_no_factors_with_same_label()
False
sage: (D[0]+D[1])._complement_shares_no_factors_with_same_label()
True
sage: D[2]._complement_shares_no_factors_with_same_label()
True
This example illustrates the relevance of the ambient product
Jacobian.
::
sage: D = (J0(11) * J0(11)).decomposition(); D
[
Simple abelian subvariety 11a(1,11) of dimension 1 of J0(11) x J0(11),
Simple abelian subvariety 11a(1,11) of dimension 1 of J0(11) x J0(11)
]
sage: D[0]._complement_shares_no_factors_with_same_label()
False
This example illustrates that it is the newform label, not the
isogeny class, that matters::
sage: D = (J0(11)*J1(11)).decomposition(); D
[
Simple abelian subvariety 11aG1(1,11) of dimension 1 of J0(11) x J1(11),
Simple abelian subvariety 11a(1,11) of dimension 1 of J0(11) x J1(11)
]
sage: D[0]._complement_shares_no_factors_with_same_label()
True
sage: D[0].newform_label()
'11aG1'
sage: D[1].newform_label()
'11a'
"""
try:
return self.__complement_shares
except AttributeError:
t = len(self._factors_with_same_label(self.complement())) == 0
self.__complement_shares = t
return t
def __getitem__(self, i):
"""
Return the `i^{th}` decomposition factor of self,
or the slice `i` of the decomposition of self.
EXAMPLES::
sage: J = J0(389)
sage: J.decomposition()
[
Simple abelian subvariety 389a(1,389) of dimension 1 of J0(389),
Simple abelian subvariety 389b(1,389) of dimension 2 of J0(389),
Simple abelian subvariety 389c(1,389) of dimension 3 of J0(389),
Simple abelian subvariety 389d(1,389) of dimension 6 of J0(389),
Simple abelian subvariety 389e(1,389) of dimension 20 of J0(389)
]
sage: J[2]
Simple abelian subvariety 389c(1,389) of dimension 3 of J0(389)
sage: J[-1]
Simple abelian subvariety 389e(1,389) of dimension 20 of J0(389)
sage: J = J0(125); J.decomposition()
[
Simple abelian subvariety 125a(1,125) of dimension 2 of J0(125),
Simple abelian subvariety 125b(1,125) of dimension 2 of J0(125),
Simple abelian subvariety 125c(1,125) of dimension 4 of J0(125)
]
sage: J[:2]
[
Simple abelian subvariety 125a(1,125) of dimension 2 of J0(125),
Simple abelian subvariety 125b(1,125) of dimension 2 of J0(125)
]
"""
return self.decomposition()[i]
class ModularAbelianVariety(ModularAbelianVariety_abstract):
def __init__(self, groups, lattice=None, base_field=QQ, is_simple=None, newform_level=None,
isogeny_number=None, number=None, check=True):
r"""
Create a modular abelian variety with given level and base field.
INPUT:
- ``groups`` - a tuple of congruence subgroups
- ``lattice`` - (default: `\ZZ^n`) a
full lattice in `\ZZ^n`, where `n` is the
sum of the dimensions of the spaces of cuspidal modular symbols
corresponding to each `\Gamma \in` groups
- ``base_field`` - a field (default:
`\QQ`)
EXAMPLES::
sage: J0(23)
Abelian variety J0(23) of dimension 2
"""
ModularAbelianVariety_abstract.__init__(self, groups, base_field, is_simple=is_simple, newform_level=newform_level,
isogeny_number=isogeny_number, number=number, check=check)
if lattice is None:
lattice = ZZ**(2*self._ambient_dimension())
if check:
n = self._ambient_dimension()
if not is_FreeModule(lattice):
raise TypeError("lattice must be a free module")
if lattice.base_ring() != ZZ:
raise TypeError("lattice must be over ZZ")
if lattice.degree() != 2*n:
raise ValueError("lattice must have degree 2*n (=%s)"%(2*n))
if not lattice.saturation().is_submodule(lattice): # potentially expensive
raise ValueError("lattice must be full")
self.__lattice = lattice
def lattice(self):
"""
Return the lattice that defines this abelian variety.
OUTPUT:
- ``lattice`` - a lattice embedded in the rational
homology of the ambient product Jacobian
EXAMPLES::
sage: A = (J0(11) * J0(37))[1]; A
Simple abelian subvariety 37a(1,37) of dimension 1 of J0(11) x J0(37)
sage: type(A)
<class 'sage.modular.abvar.abvar.ModularAbelianVariety_with_category'>
sage: A.lattice()
Free module of degree 6 and rank 2 over Integer Ring
Echelon basis matrix:
[ 0 0 1 -1 1 0]
[ 0 0 0 0 2 -1]
"""
return self.__lattice
class ModularAbelianVariety_modsym_abstract(ModularAbelianVariety_abstract):
# Anything that derives from this class must define the
# modular_symbols method, which returns a cuspidal modular symbols
# space over QQ. It can have any sign.
def _modular_symbols(self):
"""
Return the space of modular symbols corresponding to this modular
symbols abelian variety.
EXAMPLES: This function is in the abstract base class, so it raises
a NotImplementedError::
sage: M = ModularSymbols(37).cuspidal_submodule()
sage: A = M.abelian_variety(); A
Abelian variety J0(37) of dimension 2
sage: sage.modular.abvar.abvar.ModularAbelianVariety_modsym_abstract._modular_symbols(A)
Traceback (most recent call last):
...
NotImplementedError: bug -- must define this
Of course this function isn't called in practice, so this works::
sage: A._modular_symbols()
Modular Symbols subspace of dimension 4 of Modular Symbols space of dimension 5 for Gamma_0(37) of weight 2 with sign 0 over Rational Field
"""
raise NotImplementedError("bug -- must define this")
def __add__(self, other):
"""
Add two modular abelian variety factors.
EXAMPLES::
sage: A = J0(42); D = A.decomposition(); D
[
Simple abelian subvariety 14a(1,42) of dimension 1 of J0(42),
Simple abelian subvariety 14a(3,42) of dimension 1 of J0(42),
Simple abelian subvariety 21a(1,42) of dimension 1 of J0(42),
Simple abelian subvariety 21a(2,42) of dimension 1 of J0(42),
Simple abelian subvariety 42a(1,42) of dimension 1 of J0(42)
]
sage: D[0] + D[1]
Abelian subvariety of dimension 2 of J0(42)
sage: D[1].is_subvariety(D[0] + D[1])
True
sage: D[0] + D[1] + D[2]
Abelian subvariety of dimension 3 of J0(42)
sage: D[0] + D[0]
Abelian subvariety of dimension 1 of J0(42)
sage: D[0] + D[0] == D[0]
True
sage: sum(D, D[0]) == A
True
"""
if not is_ModularAbelianVariety(other):
if other == 0:
return self
raise TypeError("sum not defined")
if not isinstance(other, ModularAbelianVariety_modsym_abstract):
return ModularAbelianVariety_abstract.__add__(self, other)
if self.groups() != other.groups():
raise TypeError("sum not defined since ambient spaces different")
M = self.modular_symbols() + other.modular_symbols()
return ModularAbelianVariety_modsym(M)
def groups(self):
"""
Return the tuple of groups associated to the modular symbols
abelian variety. This is always a 1-tuple.
OUTPUT: tuple
EXAMPLES::
sage: A = ModularSymbols(33).cuspidal_submodule().abelian_variety(); A
Abelian variety J0(33) of dimension 3
sage: A.groups()
(Congruence Subgroup Gamma0(33),)
sage: type(A)
<class 'sage.modular.abvar.abvar.ModularAbelianVariety_modsym_with_category'>
"""
return (self._modular_symbols().group(), )
def lattice(self):
r"""
Return the lattice defining this modular abelian variety.
OUTPUT:
A free `\ZZ`-module embedded in an ambient `\QQ`-vector space.
EXAMPLES::
sage: A = ModularSymbols(33).cuspidal_submodule()[0].abelian_variety(); A
Abelian subvariety of dimension 1 of J0(33)
sage: A.lattice()
Free module of degree 6 and rank 2 over Integer Ring
User basis matrix:
[ 1 0 0 -1 0 0]
[ 0 0 1 0 1 -1]
sage: type(A)
<class 'sage.modular.abvar.abvar.ModularAbelianVariety_modsym_with_category'>
"""
try:
return self.__lattice
except AttributeError:
M = self.modular_symbols()
S = M.ambient_module().cuspidal_submodule()
if M.dimension() == S.dimension():
L = ZZ**M.dimension()
else:
K0 = M.integral_structure()
K1 = S.integral_structure()
L = K1.coordinate_module(K0)
self.__lattice = L
return self.__lattice
def _set_lattice(self, lattice):
"""
Set the lattice of this modular symbols abelian variety.
.. warning::
This is only for internal use. Do not use this unless you
really really know what you're doing. That's why there is
an underscore in this method name.
INPUT:
- ``lattice`` - a lattice
EXAMPLES: We do something evil - there's no type checking since
this function is for internal use only::
sage: A = ModularSymbols(33).cuspidal_submodule().abelian_variety()
sage: A._set_lattice(5)
sage: A.lattice()
5
"""
self.__lattice = lattice
def modular_symbols(self, sign=0):
"""
Return space of modular symbols (with given sign) associated to
this modular abelian variety, if it can be found by cutting down
using Hecke operators. Otherwise raise a RuntimeError exception.
EXAMPLES::
sage: A = J0(37)
sage: A.modular_symbols()
Modular Symbols subspace of dimension 4 of Modular Symbols space of dimension 5 for Gamma_0(37) of weight 2 with sign 0 over Rational Field
sage: A.modular_symbols(1)
Modular Symbols subspace of dimension 2 of Modular Symbols space of dimension 3 for Gamma_0(37) of weight 2 with sign 1 over Rational Field
More examples::
sage: J0(11).modular_symbols()
Modular Symbols subspace of dimension 2 of Modular Symbols space of dimension 3 for Gamma_0(11) of weight 2 with sign 0 over Rational Field
sage: J0(11).modular_symbols(sign=1)
Modular Symbols subspace of dimension 1 of Modular Symbols space of dimension 2 for Gamma_0(11) of weight 2 with sign 1 over Rational Field
sage: J0(11).modular_symbols(sign=0)
Modular Symbols subspace of dimension 2 of Modular Symbols space of dimension 3 for Gamma_0(11) of weight 2 with sign 0 over Rational Field
sage: J0(11).modular_symbols(sign=-1)
Modular Symbols space of dimension 1 for Gamma_0(11) of weight 2 with sign -1 over Rational Field
Even more examples::
sage: A = J0(33)[1]; A
Simple abelian subvariety 11a(3,33) of dimension 1 of J0(33)
sage: A.modular_symbols()
Modular Symbols subspace of dimension 2 of Modular Symbols space of dimension 9 for Gamma_0(33) of weight 2 with sign 0 over Rational Field
It is not always possible to determine the sign subspaces::
sage: A.modular_symbols(1)
Traceback (most recent call last):
...
RuntimeError: unable to determine sign (=1) space of modular symbols
::
sage: A.modular_symbols(-1)
Traceback (most recent call last):
...
RuntimeError: unable to determine sign (=-1) space of modular symbols
"""
M = self._modular_symbols().modular_symbols_of_sign(sign)
if (sign != 0 and M.dimension() != self.dimension()) or (sign == 0 and M.dimension() != 2*self.dimension()):
raise RuntimeError("unable to determine sign (=%s) space of modular symbols"%sign)
return M
def _compute_hecke_polynomial(self, n, var='x'):
"""
Return the characteristic polynomial of the `n^{th}` Hecke
operator on self.
.. note::
If self has dimension d, then this is a polynomial of
degree d. It is not of degree 2\*d, so it is the square
root of the characteristic polynomial of the Hecke operator
on integral or rational homology (which has degree 2\*d).
EXAMPLES::
sage: J0(11).hecke_polynomial(2)
x + 2
sage: J0(23)._compute_hecke_polynomial(2)
x^2 + x - 1
sage: J1(13).hecke_polynomial(2)
x^2 + 3*x + 3
sage: factor(J0(43).hecke_polynomial(2))
(x + 2) * (x^2 - 2)
The Hecke polynomial is the square root of the characteristic
polynomial::
sage: factor(J0(43).hecke_operator(2).charpoly())
(x + 2) * (x^2 - 2)
"""
return sqrt_poly(self.modular_symbols().hecke_polynomial(n, var))
def _integral_hecke_matrix(self, n, sign=0):
"""
Return the action of the Hecke operator `T_n` on the
integral homology of self.
INPUT:
- ``n`` - a positive integer
- ``sign`` - 0, +1, or -1; if 1 or -1 act on the +1 or
-1 quotient of the integral homology.
EXAMPLES::
sage: J1(13)._integral_hecke_matrix(2) # slightly random choice of basis
[-2 0 -1 1]
[ 1 -1 0 -1]
[ 1 1 -2 0]
[ 0 1 -1 -1]
sage: J1(13)._integral_hecke_matrix(2,sign=1) # slightly random choice of basis
[-1 1]
[-1 -2]
sage: J1(13)._integral_hecke_matrix(2,sign=-1) # slightly random choice of basis
[-2 -1]
[ 1 -1]
"""
return self.modular_symbols(sign).integral_hecke_matrix(n)
def _rational_hecke_matrix(self, n, sign=0):
"""
Return the action of the Hecke operator `T_n` on the
rational homology of self.
INPUT:
- ``n`` - a positive integer
- ``sign`` - 0, +1, or -1; if 1 or -1 act on the +1 or
-1 quotient of the rational homology.
EXAMPLES::
sage: J1(13)._rational_hecke_matrix(2) # slightly random choice of basis
[-2 0 -1 1]
[ 1 -1 0 -1]
[ 1 1 -2 0]
[ 0 1 -1 -1]
sage: J0(43)._rational_hecke_matrix(2,sign=1) # slightly random choice of basis
[-2 0 1]
[-1 -2 2]
[-2 0 2]
"""
return self._integral_hecke_matrix(n, sign=sign).change_ring(QQ)
def group(self):
"""
Return the congruence subgroup that this modular abelian
variety is associated to.
EXAMPLES::
sage: J0(13).group()
Congruence Subgroup Gamma0(13)
sage: J1(997).group()
Congruence Subgroup Gamma1(997)
sage: JH(37,[3]).group()
Congruence Subgroup Gamma_H(37) with H generated by [3]
sage: J0(37)[1].groups()
(Congruence Subgroup Gamma0(37),)
"""
return self.modular_symbols().group()
def is_subvariety(self, other):
"""
Return True if self is a subvariety of other.
EXAMPLES::
sage: J = J0(37); J
Abelian variety J0(37) of dimension 2
sage: A = J[0]; A
Simple abelian subvariety 37a(1,37) of dimension 1 of J0(37)
sage: A.is_subvariety(J)
True
sage: A.is_subvariety(J0(11))
False
There may be a way to map `A` into `J_0(74)`, but
`A` is not equipped with any special structure of an
embedding.
::
sage: A.is_subvariety(J0(74))
False
Some ambient examples::
sage: J = J0(37)
sage: J.is_subvariety(J)
True
sage: J.is_subvariety(25)
False
More examples::
sage: A = J0(42); D = A.decomposition(); D
[
Simple abelian subvariety 14a(1,42) of dimension 1 of J0(42),
Simple abelian subvariety 14a(3,42) of dimension 1 of J0(42),
Simple abelian subvariety 21a(1,42) of dimension 1 of J0(42),
Simple abelian subvariety 21a(2,42) of dimension 1 of J0(42),
Simple abelian subvariety 42a(1,42) of dimension 1 of J0(42)
]
sage: D[0].is_subvariety(A)
True
sage: D[1].is_subvariety(D[0] + D[1])
True
sage: D[2].is_subvariety(D[0] + D[1])
False
"""
if not is_ModularAbelianVariety(other):
return False
if not isinstance(other, ModularAbelianVariety_modsym_abstract):
return ModularAbelianVariety_abstract.is_subvariety(self, other)
return self.modular_symbols().is_submodule(other.modular_symbols())
def is_ambient(self):
"""
Return True if this abelian variety, which is attached to a modular
symbols space, is attached to the cuspidal subspace of the ambient
modular symbols space.
OUTPUT: bool
EXAMPLES::
sage: A = ModularSymbols(43).cuspidal_subspace().abelian_variety(); A
Abelian variety J0(43) of dimension 3
sage: A.is_ambient()
True
sage: type(A)
<class 'sage.modular.abvar.abvar.ModularAbelianVariety_modsym_with_category'>
sage: A = ModularSymbols(43).cuspidal_subspace()[1].abelian_variety(); A
Abelian subvariety of dimension 2 of J0(43)
sage: A.is_ambient()
False
"""
return self.degree() == self.dimension()
def dimension(self):
"""
Return the dimension of this modular abelian variety.
EXAMPLES::
sage: J0(37)[0].dimension()
1
sage: J0(43)[1].dimension()
2
sage: J1(17)[1].dimension()
4
"""
try:
return self._dimension
except AttributeError:
M = self._modular_symbols()
if M.sign() == 0:
d = M.dimension() // 2
else:
d = M.dimension()
self._dimension = d
return d
def new_subvariety(self, p=None):
"""
Return the new or `p`-new subvariety of self.
INPUT:
- ``self`` - a modular abelian variety
- ``p`` - prime number or None (default); if p is a
prime, return the p-new subvariety. Otherwise return the full new
subvariety.
EXAMPLES::
sage: J0(33).new_subvariety()
Abelian subvariety of dimension 1 of J0(33)
sage: J0(100).new_subvariety()
Abelian subvariety of dimension 1 of J0(100)
sage: J1(13).new_subvariety()
Abelian variety J1(13) of dimension 2
"""
try:
return self.__new_subvariety[p]
except AttributeError:
self.__new_subvariety = {}
except KeyError:
pass
A = self.modular_symbols()
N = A.new_submodule(p=p)
B = ModularAbelianVariety_modsym(N)
self.__new_subvariety[p] = B
return B
def old_subvariety(self, p=None):
"""
Return the old or `p`-old abelian variety of self.
INPUT:
- ``self`` - a modular abelian variety
- ``p`` - prime number or None (default); if p is a
prime, return the p-old subvariety. Otherwise return the full old
subvariety.
EXAMPLES::
sage: J0(33).old_subvariety()
Abelian subvariety of dimension 2 of J0(33)
sage: J0(100).old_subvariety()
Abelian subvariety of dimension 6 of J0(100)
sage: J1(13).old_subvariety()
Abelian subvariety of dimension 0 of J1(13)
"""
try:
return self.__old_subvariety[p]
except AttributeError:
self.__old_subvariety = {}
except KeyError:
pass
A = self.modular_symbols()
N = A.old_submodule(p=p)
B = ModularAbelianVariety_modsym(N)
self.__old_subvariety[p] = B
return B
def decomposition(self, simple=True, bound=None):
r"""
Decompose this modular abelian variety as a product of abelian
subvarieties, up to isogeny.
INPUT:
- ``simple`` - bool (default: True); if True, all factors are
simple. If False, each factor returned is isogenous to a power of a
simple and the simples in each factor are distinct.
- ``bound`` - int (default: None) if given, only use
Hecke operators up to this bound when decomposing. This can give
wrong answers, so use with caution!
EXAMPLES::
sage: J = J0(33)
sage: J.decomposition()
[
Simple abelian subvariety 11a(1,33) of dimension 1 of J0(33),
Simple abelian subvariety 11a(3,33) of dimension 1 of J0(33),
Simple abelian subvariety 33a(1,33) of dimension 1 of J0(33)
]
sage: J1(17).decomposition()
[
Simple abelian subvariety 17aG1(1,17) of dimension 1 of J1(17),
Simple abelian subvariety 17bG1(1,17) of dimension 4 of J1(17)
]
"""
try:
return self.__decomposition[(simple, bound)]
except KeyError:
pass
except AttributeError:
self.__decomposition = {}
if not self.is_ambient():
S = ModularAbelianVariety_abstract.decomposition(self, simple=simple, bound=bound)
else:
A = self.modular_symbols()
amb = A.ambient_module()
G = amb.group()
S = amb.cuspidal_submodule().integral_structure()
if simple:
M = A.level()
D = []
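# For each divisor N of the level M, decompose the new subspace at
# level N and push each simple factor up to level M via the
# degeneracy maps indexed by the divisors t of M/N.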
for N in reversed(divisors(M)):
if N > 1:
isogeny_number = 0
A = amb.modular_symbols_of_level(N).cuspidal_subspace().new_subspace()
if bound is None:
X = factor_new_space(A)
else:
X = A.decomposition(bound = bound)
for B in X:
for t in divisors(M//N):
D.append(ModularAbelianVariety_modsym(B.degeneracy_map(M, t).image(),
is_simple=True, newform_level=(N, G),
isogeny_number=isogeny_number,
number=(t,M)))
isogeny_number += 1
elif A == amb.cuspidal_submodule():
D = [ModularAbelianVariety_modsym(B) for B in A.decomposition(bound = bound)]
else:
D = ModularAbelianVariety_abstract.decomposition(self, simple=simple, bound=bound)
D.sort()
S = Sequence(D, immutable=True, cr=True, universe=self.category())
self.__decomposition[(simple, bound)] = S
return S
class ModularAbelianVariety_modsym(ModularAbelianVariety_modsym_abstract):
def __init__(self, modsym, lattice=None, newform_level=None,
is_simple=None, isogeny_number=None, number=None, check=True):
"""
Modular abelian variety that corresponds to a Hecke stable space of
cuspidal modular symbols.
EXAMPLES: We create a modular abelian variety attached to a space
of modular symbols.
::
sage: M = ModularSymbols(23).cuspidal_submodule()
sage: A = M.abelian_variety(); A
Abelian variety J0(23) of dimension 2
"""
if check:
if not isinstance(modsym, ModularSymbolsSpace):
raise TypeError("modsym must be a modular symbols space")
if modsym.sign() != 0:
raise TypeError("modular symbols space must have sign 0")
if not modsym.is_cuspidal():
raise ValueError("modsym must be cuspidal")
ModularAbelianVariety_abstract.__init__(self, (modsym.group(), ), modsym.base_ring(),
newform_level=newform_level, is_simple=is_simple,
isogeny_number=isogeny_number, number=number, check=check)
if lattice is not None:
self._set_lattice(lattice)
self.__modsym = modsym
def _modular_symbols(self):
"""
Return the modular symbols space that defines this modular abelian
variety.
OUTPUT: space of modular symbols
EXAMPLES::
sage: M = ModularSymbols(37).cuspidal_submodule()
sage: A = M.abelian_variety(); A
Abelian variety J0(37) of dimension 2
sage: A._modular_symbols()
Modular Symbols subspace of dimension 4 of Modular Symbols space of dimension 5 for Gamma_0(37) of weight 2 with sign 0 over Rational Field
"""
return self.__modsym
def component_group_order(self, p):
"""
Return the order of the component group of the special fiber
at p of the Neron model of self.
NOTE: For bad primes, this is only implemented when the group
is Gamma0 and p exactly divides the level.
NOTE: the input abelian variety must be simple
ALGORITHM: See "Component Groups of Quotients of J0(N)" by Kohel and Stein. That
paper is about optimal quotients; however, by section 4.1 of Conrad-Stein "Component
Groups of Purely Toric Quotients", one sees that the component group of an optimal
quotient is the same as the component group of its dual (which is the subvariety).
INPUT:
- p -- a prime number
OUTPUT:
- Integer
EXAMPLES::
sage: A = J0(37)[1]
sage: A.component_group_order(37)
3
sage: A = J0(43)[1]
sage: A.component_group_order(37)
1
sage: A.component_group_order(43)
7
sage: A = J0(23)[0]
sage: A.component_group_order(23)
11
"""
if not self.is_simple():
raise ValueError("self must be simple")
p = Integer(p)
if not p.is_prime():
raise ValueError("p must be a prime integer")
try:
return self.__component_group[p][0]
except AttributeError:
self.__component_group = {}
except KeyError:
pass
# Easy special case -- a prime of good reduction
if self.level() % p != 0:
one = Integer(1)
self.__component_group[p] = (one,one,one)
return one
# Cases that we don't know how to handle yet.
if not is_Gamma0(self.group()):
raise NotImplementedError("computation of component group not implemented when group isn't Gamma0")
if self.level() % (p*p) == 0:
raise NotImplementedError("computation of component group not implemented when p^2 divides the level")
# Now we're on Gamma0(p*M) with gcd(p,M) = 1.
# 1. Compute factor of Brandt module space, and put integral structure on it.
# TODO -- in case self.level() is prime, should use
# supersingular module instead for massive speedup... Of
# course, then one can just use Emerton's theorem that the
# component group order equals the torsion order, and avoid
# all of this!
XI = self.brandt_module(p)
Y = XI.ambient_module()
n = Y.dimension()
# X_ZZ is the submodule of degree 0 divisors
M = ZZ**n
deg_zero = []
for k in range(1,n):
v = vector(ZZ, n)
v[0] = 1
v[k] = -1
deg_zero.append(v)
X_ZZ = M.span(deg_zero, ZZ)
XI_ZZ = XI.free_module().intersection(M)
# 2. Compute the map alpha: X --> Hom(X[I],Z) over ZZ
# todo -- this could be done more quickly with a clever matrix multiply
B = [XI(v) for v in XI_ZZ.basis()]
mat = []
for v in M.basis():
w = Y(v)
mat.append([w.monodromy_pairing(b) for b in B])
monodromy = matrix(ZZ, mat)
alpha = X_ZZ.basis_matrix().change_ring(ZZ) * monodromy
# 3. Compute invariants:
# * Phi_X = #coker(alpha)
# * m_X = #(alpha(X)/alpha(X[I]))
alphaX = alpha.row_module()
Phi_X_invariants = alphaX.basis_matrix().change_ring(ZZ).elementary_divisors()
Phi_X = prod(Phi_X_invariants + [Integer(1)])
W = alphaX.span([b*monodromy for b in XI_ZZ.basis()], ZZ)
m_X = Integer(W.index_in(alphaX))
# 4. Compute the modular degree
moddeg = self.modular_degree()
# 5. Obtain the component group order using Theorem 1 of [Kohel-Stein]
Phi = Phi_X * moddeg / m_X
# 6. Record the answer
self.__component_group[p] = (Phi, Phi_X_invariants, m_X)
return Phi
def _invariants_of_image_of_component_group_of_J0(self, p):
"""
Return the elementary invariants of the image of the component
group of J0(N). The API of this function is subject to
change, which is why it starts with an underscore.
INPUT:
- p -- integer
OUTPUT:
- list -- of elementary invariants
EXAMPLES::
sage: A = J0(62).new_subvariety()[1]; A
Simple abelian subvariety 62b(1,62) of dimension 2 of J0(62)
sage: A._invariants_of_image_of_component_group_of_J0(2)
[1, 6]
sage: A.component_group_order(2)
66
"""
self.component_group_order(p)
return list(self.__component_group[p][1]) # make a copy
def tamagawa_number(self, p):
"""
Return the Tamagawa number of this abelian variety at p.
NOTE: For bad primes, this is only implemented when the group
is Gamma0, p exactly divides the level, and Atkin-Lehner
acts diagonally on this abelian variety (e.g., if this variety
is new and simple). See the self.component_group command for
more information.
NOTE: the input abelian variety must be simple
In cases where this function doesn't work, consider using the
self.tamagawa_number_bounds function instead.
INPUT:
- p -- a prime number
OUTPUT:
- Integer
EXAMPLES::
sage: A = J0(37)[1]
sage: A.tamagawa_number(37)
3
sage: A = J0(43)[1]
sage: A.tamagawa_number(37)
1
sage: A.tamagawa_number(43)
7
sage: A = J0(23)[0]
sage: A.tamagawa_number(23)
11
"""
try:
return self.__tamagawa_number[p]
except AttributeError:
self.__tamagawa_number = {}
except KeyError:
pass
if not self.is_simple():
raise ValueError("self must be simple")
try:
g = self.component_group_order(p)
except NotImplementedError:
raise NotImplementedError("Tamagawa number can't be determined using known algorithms, so consider using the tamagawa_number_bounds function instead")
div, mul, mul_primes = self.tamagawa_number_bounds(p)
if div == mul:
cp = div
else:
raise NotImplementedError("the Tamagawa number at %s is a power of 2, but the exact power can't be determined using known algorithms. Consider using the tamagawa_number_bounds function instead."%p)
self.__tamagawa_number[p] = cp
return cp
def tamagawa_number_bounds(self, p):
"""
Return a divisor and multiple of the Tamagawa number of self at p.
NOTE: the input abelian variety must be simple
INPUT:
- p -- a prime number
OUTPUT:
- div -- integer; divisor of Tamagawa number at p
- mul -- integer; multiple of Tamagawa number at p
- mul_primes -- tuple; in case mul==0, a list of all
primes that can possibly divide the Tamagawa number at p.
EXAMPLES::
sage: A = J0(63).new_subvariety()[1]; A
Simple abelian subvariety 63b(1,63) of dimension 2 of J0(63)
sage: A.tamagawa_number_bounds(7)
(3, 3, ())
sage: A.tamagawa_number_bounds(3)
(1, 0, (2, 3, 5))
"""
try:
return self.__tamagawa_number_bounds[p]
except AttributeError:
self.__tamagawa_number_bounds = {}
except KeyError:
pass
if not self.is_simple():
raise ValueError("self must be simple")
N = self.level()
div = 1; mul = 0; mul_primes = []
if N % p != 0:
div = 1; mul = 1
elif N.valuation(p) == 1:
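# p exactly divides the level, so in the Gamma0 case the component
# group at p can be computed exactly (via component_group_order above).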
M = self.modular_symbols(sign=1)
if is_Gamma0(M.group()):
g = self.component_group_order(p)
W = M.atkin_lehner_operator(p).matrix()
cp = None
if W == -1:
# Frob acts trivially
div = g; mul = g
elif W == 1:
# Frob acts by -1
n = g.valuation(2)
if n <= 1:
div = 2**n
else:
phi_X_invs = self._invariants_of_image_of_component_group_of_J0(p)
m = max(1, len([z for z in phi_X_invs if z%2==0]))
div = 2**m
mul = 2**n
else:
raise NotImplementedError("Atkin-Lehner at p must act as a scalar")
else:
mul_primes = list(sorted(set([p] + [q for q in prime_range(2,2*self.dimension()+2)])))
div = Integer(div)
mul = Integer(mul)
mul_primes = tuple(mul_primes)
self.__tamagawa_number_bounds[p] = (div, mul, mul_primes)
return (div, mul, mul_primes)
def brandt_module(self, p):
"""
Return the Brandt module at p that corresponds to self. This
is the factor of the vector space on the ideal class set in an
order of level N in the quaternion algebra ramified at p and
infinity.
INPUT:
- p -- prime that exactly divides the level
OUTPUT:
- Brandt module space that corresponds to self.
EXAMPLES::
sage: J0(43)[1].brandt_module(43)
Subspace of dimension 2 of Brandt module of dimension 4 of level 43 of weight 2 over Rational Field
sage: J0(43)[1].brandt_module(43).basis()
((1, 0, -1/2, -1/2), (0, 1, -1/2, -1/2))
sage: J0(43)[0].brandt_module(43).basis()
((0, 0, 1, -1),)
sage: J0(35)[0].brandt_module(5).basis()
((1, 0, -1, 0),)
sage: J0(35)[0].brandt_module(7).basis()
((1, -1, 1, -1),)
"""
try:
return self.__brandt_module[p]
except AttributeError:
self.__brandt_module = {}
except KeyError:
pass
p = Integer(p)
if not is_Gamma0(self.group()):
raise NotImplementedError("Brandt module only defined on Gamma0")
if not p.is_prime():
raise ValueError("p must be a prime integer")
if self.level().valuation(p) != 1:
raise ValueError("p must exactly divide the level")
M = self.level() / p
from sage.modular.all import BrandtModule
V = BrandtModule(p, M)
# Now cut out the piece of the Brandt module V that corresponds to self.
S = self.modular_symbols(sign=1)
B = S.hecke_bound()
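# Two strategies: for small dimension, iteratively cut V down by
# kernels of Hecke polynomials; otherwise decompose V completely and
# match the factor with the same Hecke polynomials as self.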
if self.dimension() <= 3:
q = 2
while V.dimension() > self.dimension() and q <= B:
f = S.hecke_polynomial(q)
V = f(V.hecke_operator(q)).kernel()
q = next_prime(q)
if V.dimension() > self.dimension():
raise RuntimeError("unable to cut out Brandt module (got dimension %s instead of %s)"%(V.dimension(), self.dimension()))
else:
D = V.decomposition()
D = [A for A in D if A.dimension() == self.dimension()]
# now figure out which element of D is isomorphic to self.
q = 2
while len(D) > 1 and q <= B:
f = S.hecke_polynomial(q)
D = [A for A in D if A.hecke_polynomial(q) == f]
q = next_prime(q)
if len(D) != 1:
raise RuntimeError("unable to locate Brandt module (got %s candidates instead of 1)"%(len(D)))
V = D[0]
self.__brandt_module[p] = V
return V
def sqrt_poly(f):
"""
Return the square root of the polynomial `f`.
.. note::
At some point something like this should be a member of the
polynomial class. For now this is just used internally by some
charpoly functions above.
EXAMPLES::
sage: R.<x> = QQ[]
sage: f = (x-1)*(x+2)*(x^2 + 1/3*x + 5)
sage: f
x^4 + 4/3*x^3 + 10/3*x^2 + 13/3*x - 10
sage: sage.modular.abvar.abvar.sqrt_poly(f^2)
x^4 + 4/3*x^3 + 10/3*x^2 + 13/3*x - 10
sage: sage.modular.abvar.abvar.sqrt_poly(f)
Traceback (most recent call last):
...
ValueError: f must be a perfect square
sage: sage.modular.abvar.abvar.sqrt_poly(2*f^2)
Traceback (most recent call last):
...
ValueError: f must be monic
"""
if not f.is_monic():
raise ValueError("f must be monic")
try:
return prod([g**Integer(e/Integer(2)) for g,e in f.factor()])
except TypeError:
raise ValueError("f must be a perfect square")
####################################################################################################
# Useful for decomposing exactly the sort of modular symbols spaces that come up here.
from random import randrange
from sage.rings.arith import next_prime
def random_hecke_operator(M, t=None, p=2):
"""
Return a random Hecke operator acting on `M`, obtained by adding
to `t` a random multiple of `T_p`.
INPUT:
- ``M`` - modular symbols space
- ``t`` - None or a Hecke operator
- ``p`` - a prime
OUTPUT: a Hecke operator and the next prime after `p`
EXAMPLES::
sage: M = ModularSymbols(11).cuspidal_subspace()
sage: t, p = sage.modular.abvar.abvar.random_hecke_operator(M)
sage: p
3
sage: t, p = sage.modular.abvar.abvar.random_hecke_operator(M, t, p)
sage: p
5
"""
r = 0
while r == 0:
r = randrange(1,p//2+1) * ZZ.random_element()
t = (0 if t is None else t) + r*M.hecke_operator(p)
return t, next_prime(p)
def factor_new_space(M):
"""
Given a new space `M` of modular symbols, return the
decomposition of `M` into simples under the Hecke
operators.
INPUT:
- ``M`` - modular symbols space
OUTPUT: list of factors
EXAMPLES::
sage: M = ModularSymbols(37).cuspidal_subspace()
sage: sage.modular.abvar.abvar.factor_new_space(M)
[
Modular Symbols subspace of dimension 2 of Modular Symbols space of dimension 5 for Gamma_0(37) of weight 2 with sign 0 over Rational Field,
Modular Symbols subspace of dimension 2 of Modular Symbols space of dimension 5 for Gamma_0(37) of weight 2 with sign 0 over Rational Field
]
"""
t = None; p = 2
for i in range(200):
t, p = random_hecke_operator(M, t, p)
f = t.charpoly()
cube_free = True
for _, e in f.factor():
if e > 2:
cube_free = False
break
if cube_free:
return t.decomposition()
t, p = random_hecke_operator(M, t, p)
raise RuntimeError("unable to factor new space -- this should not happen")
def factor_modsym_space_new_factors(M):
"""
Given an ambient modular symbols space, return complete
factorization of it.
INPUT:
- ``M`` - modular symbols space
OUTPUT: list of decompositions corresponding to each new space.
EXAMPLES::
sage: M = ModularSymbols(33)
sage: sage.modular.abvar.abvar.factor_modsym_space_new_factors(M)
[[
Modular Symbols subspace of dimension 2 of Modular Symbols space of dimension 3 for Gamma_0(11) of weight 2 with sign 0 over Rational Field
],
[
Modular Symbols subspace of dimension 2 of Modular Symbols space of dimension 9 for Gamma_0(33) of weight 2 with sign 0 over Rational Field
]]
"""
eps = M.character()
K = eps.conductor() if eps is not None else 1
N = [M.modular_symbols_of_level(d).cuspidal_subspace().new_subspace() \
for d in M.level().divisors() if d%K == 0 and (d == 11 or d >= 13)]
return [factor_new_space(A) for A in N]
def simple_factorization_of_modsym_space(M, simple=True):
"""
Return factorization of `M`. If simple is False, return
powers of simples.
INPUT:
- ``M`` - modular symbols space
- ``simple`` - bool (default: True)
OUTPUT: sequence
EXAMPLES::
sage: M = ModularSymbols(33)
sage: sage.modular.abvar.abvar.simple_factorization_of_modsym_space(M)
[
(11, 0, 1, Modular Symbols subspace of dimension 2 of Modular Symbols space of dimension 9 for Gamma_0(33) of weight 2 with sign 0 over Rational Field),
(11, 0, 3, Modular Symbols subspace of dimension 2 of Modular Symbols space of dimension 9 for Gamma_0(33) of weight 2 with sign 0 over Rational Field),
(33, 0, 1, Modular Symbols subspace of dimension 2 of Modular Symbols space of dimension 9 for Gamma_0(33) of weight 2 with sign 0 over Rational Field)
]
sage: sage.modular.abvar.abvar.simple_factorization_of_modsym_space(M, simple=False)
[
(11, 0, None, Modular Symbols subspace of dimension 4 of Modular Symbols space of dimension 9 for Gamma_0(33) of weight 2 with sign 0 over Rational Field),
(33, 0, None, Modular Symbols subspace of dimension 2 of Modular Symbols space of dimension 9 for Gamma_0(33) of weight 2 with sign 0 over Rational Field)
]
"""
D = []
N = M.level()
for G in factor_modsym_space_new_factors(M):
if len(G) > 0:
# Compute the matrices of the degeneracy maps up.
T = divisors(N//G[0].level())
degen = [G[0].ambient_module().degeneracy_map(N, t).matrix() for t in T]
# Construct a matrix with rows the basis for all the factors
# stacked on top of each other. We just multiply this by each
# degeneracy matrix to get the basis for the images of the
# factors at higher level. By doing matrix multiplies, we
# save time over taking images of individual factors.
matrix = G[0].basis_matrix()
for A in G[1:]:
matrix = matrix.stack(A.basis_matrix())
# Compute the actual images
ims = [matrix * z for z in degen]
# Construct the corresponding subspaces at higher level.
j = 0
for (isog,A) in enumerate(G):
d = A.dimension()
if simple:
for i in range(len(T)):
V = ims[i].matrix_from_rows(range(j, j+d)).row_module()
W = M.submodule(V, check=False)
D.append( (A.level(), isog, T[i], W) )
else:
V = sum(ims[i].matrix_from_rows(range(j, j+d)).row_module() for i in range(len(T)))
W = M.submodule(V, check=False)
D.append( (A.level(), isog, None, W))
j += d
return Sequence(D, cr=True)
def modsym_lattices(M, factors):
"""
Append lattice information to the output of
simple_factorization_of_modsym_space.
INPUT:
- ``M`` - modular symbols space
- ``factors`` - Sequence
(the output of simple_factorization_of_modsym_space)
OUTPUT: sequence with more information for each factor (the
lattice)
EXAMPLES::
sage: M = ModularSymbols(33)
sage: factors = sage.modular.abvar.abvar.simple_factorization_of_modsym_space(M, simple=False)
sage: sage.modular.abvar.abvar.modsym_lattices(M, factors)
[
(11, 0, None, Modular Symbols subspace of dimension 4 of Modular Symbols space of dimension 9 for Gamma_0(33) of weight 2 with sign 0 over Rational Field, Free module of degree 6 and rank 4 over Integer Ring
Echelon basis matrix:
[ 1 0 0 0 -1 2]
[ 0 1 0 0 -1 1]
[ 0 0 1 0 -2 2]
[ 0 0 0 1 -1 -1]),
(33, 0, None, Modular Symbols subspace of dimension 2 of Modular Symbols space of dimension 9 for Gamma_0(33) of weight 2 with sign 0 over Rational Field, Free module of degree 6 and rank 2 over Integer Ring
Echelon basis matrix:
[ 1 0 0 -1 0 0]
[ 0 0 1 0 1 -1])
]
"""
# 1. Change basis of everything to the ambient integral modular symbols space
# 2. Clear denominator.
# 3. Echelonize/saturate each factor
if len(factors) == 0:
return factors
D = []
I = M.cuspidal_submodule().integral_structure().basis_matrix()
A = factors[0][-1].basis_matrix()
rows = [range(A.nrows())]
for F in factors[1:]:
mat = F[-1].basis_matrix()
i = rows[-1][-1]+1
rows.append(range(i, i + mat.nrows()))
A = A.stack(mat)
X = I.solve_left(A)
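# solve_left finds X with X*I = A, so each row of X gives the coordinates of a
# factor's basis vector with respect to the integral basis I.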
X, _ = X._clear_denom()
for i, R in enumerate(rows):
A = X.matrix_from_rows(R)
A = copy(A.saturation())
A.echelonize()
D.append(tuple(list(factors[i]) + [A.row_module()]))
return Sequence(D, cr=True) | sage: J = J0(11) |
0007_auto_20190803_0831.py | # Generated by Django 2.2.4 on 2019-08-03 08:31
from django.db import migrations, models
import users.models
class | (migrations.Migration):
dependencies = [
('users', '0006_auto_20190803_0830'),
]
operations = [
migrations.AlterField(
model_name='customuser',
name='_image',
field=models.ImageField(blank=True, null=True, upload_to=users.models.save_image, verbose_name='image'),
),
migrations.AlterField(
model_name='customuser',
name='_image_thumb',
field=models.ImageField(blank=True, null=True, upload_to=users.models.save_thumb, verbose_name='image_thumb'),
),
]
| Migration |
services_container.py | # Copyright 2021 ONDEWO GmbH
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from dataclasses import dataclass
from ondewo.qa.services.qa import QA
from ondewo.utils.base_service_container import BaseServicesContainer
@dataclass
class | (BaseServicesContainer):
qa: QA
| ServicesContainer |
constrain.py | # Copyright 2019 DeepMind Technologies Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Gradient transformations used to enforce specific constraints."""
from typing import Any, NamedTuple
import jax
import jax.numpy as jnp
from optax._src import base
# pylint:disable=no-value-for-parameter
NonNegativeParamsState = base.EmptyState
def keep_params_nonnegative() -> base.GradientTransformation:
"""Modifies the updates to keep parameters non-negative, i.e. >= 0.
This transformation ensures that parameters after the update will be
larger than or equal to zero.
In a chain of transformations, this should be the last one.
WARNING: the transformation expects input params to be non-negative.
When params are negative, the transformed update will move them to 0.
Returns:
An (init_fn, update_fn) tuple.
"""
def init_fn(_):
return NonNegativeParamsState()
def update_fn(updates, state, params):
if params is None:
raise ValueError(base.NO_PARAMS_MSG)
updates = jax.tree_multimap(
lambda p, u: jnp.where((p + u) < 0., -p, u), params, updates)
return updates, state
return base.GradientTransformation(init_fn, update_fn)
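# A minimal usage sketch (assumed names, not part of this module): chained after
# an optimizer so the final update clips each parameter at the boundary p + u >= 0.
#
#   import optax
#   tx = optax.chain(optax.sgd(learning_rate=0.1), keep_params_nonnegative())
#   opt_state = tx.init(params)
#   updates, opt_state = tx.update(grads, opt_state, params)
#   params = optax.apply_updates(params, updates)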
class ZeroNansState(NamedTuple):
"""Contains a tree.
The entry `found_nan` has the same tree structure as that of the parameters.
Each leaf is a single boolean which contains True iff a NaN was detected in
the corresponding parameter array at the last call to `update`.
"""
found_nan: Any
def zero_nans() -> base.GradientTransformation:
"""A transformation which replaces NaNs with 0.
Zeroing values in gradients is guaranteed to produce a direction of
non-increasing loss.
The state of the transformation has the same tree structure as that of the
parameters. Each leaf is a single boolean which contains True iff a NaN was
detected in the corresponding parameter array at the last call to `update`.
This state is not used by the transformation internally, but lets users be
aware when NaNs have been zeroed out.
Returns:
A `GradientTransformation`.
"""
def init_fn(params):
return ZeroNansState(
jax.tree_map(lambda p: jnp.array(False, dtype=jnp.bool_), params))
def | (updates, opt_state, params=None):
del params
opt_state = ZeroNansState(
jax.tree_map(lambda p: jnp.any(jnp.isnan(p)), updates))
updates = jax.tree_map(
lambda p: jnp.where(jnp.isnan(p), jnp.zeros_like(p), p), updates)
return updates, opt_state
return base.GradientTransformation(init=init_fn, update=update_fn)
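# Sketch (assumed usage): put zero_nans() first in a chain so downstream
# transforms never see NaNs; the first element of the chained state exposes
# `found_nan` for monitoring.
#
#   tx = optax.chain(zero_nans(), optax.adam(1e-3))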
| update_fn |
player.rs | use crate::game_state::Vector2d;
use crate::*;
use core::time::Duration;
use std::collections::BTreeSet;
// The state of user input at some point in time. i.e. what buttons is
// the user holding down?
#[derive(Clone, Copy, Default, Debug, PartialEq, Eq)]
pub struct InputState {
pub up: bool,
pub down: bool,
pub left: bool,
pub right: bool,
pub kill: bool,
pub activate: bool,
pub report: bool,
pub play: bool,
pub skip_back: bool,
pub skip_forward: bool,
pub pause_playback: bool,
}
impl InputState {
// Returns an InputState with buttons set to true if they
// aren't pressed on self, but are set on newer_input.
fn get_new_presses(&self, newer_input: InputState) -> InputState {
InputState {
up: !self.up && newer_input.up,
down: !self.down && newer_input.down,
left: !self.left && newer_input.left,
right: !self.right && newer_input.right,
kill: !self.kill && newer_input.kill,
activate: !self.activate && newer_input.activate,
report: !self.report && newer_input.report,
play: !self.play && newer_input.play,
skip_back: !self.skip_back && newer_input.skip_back,
skip_forward: !self.skip_forward && newer_input.skip_forward,
pause_playback: !self.pause_playback && newer_input.pause_playback,
}
}
}
// A game from the perspective of a specific player
pub struct GameAsPlayer {
pub my_uuid: UUID,
inputs: InputState,
pub state: GameState,
pub socket: Box<dyn GameTx>,
pub contextual_state: ContextualState,
pub displayed_messages: Vec<DisplayMessage>,
}
// A game from the perspective of a particular player.
impl GameAsPlayer {
pub fn new(uuid: UUID, socket: Box<dyn GameTx>) -> GameAsPlayer {
GameAsPlayer {
state: GameState::new(),
inputs: InputState::default(),
contextual_state: ContextualState::Blank,
my_uuid: uuid,
socket,
displayed_messages: Vec::new(),
}
}
// Is there a way to avoid duplicating the logic between local_player and local_player_mut?
pub fn local_player(&self) -> Option<&Player> {
self.state.players.get(&self.my_uuid)
}
fn local_player_mut(&mut self) -> Option<&mut Player> {
self.state.players.get_mut(&self.my_uuid)
}
pub fn inputs(&self) -> InputState {
self.inputs
}
pub fn simulate(&mut self, elapsed: Duration) -> bool {
// Tick down time for our displayed messages, and drop the ones
// whose durations have expired.
for message in self.displayed_messages.iter_mut() {
message.pass_time(elapsed);
}
self.displayed_messages.retain(|m| !m.is_expired());
self.state.simulate(elapsed)
}
// Take the given inputs from the local player
pub fn take_input(&mut self, new_input: InputState) -> Result<(), String> {
match &self.state.status {
GameStatus::Lobby | GameStatus::Playing(PlayState::Night) => self.take_night_input(new_input),
GameStatus::Playing(PlayState::Voting(day_state)) => {
let updated_voting_state = self.take_day_input(day_state, new_input)?;
if let Some(updated_voting_state) = updated_voting_state {
match &mut self.contextual_state {
ContextualState::Blank => return Err("Internal error, bad contextual state".into()),
ContextualState::Voting(voting) => {
*voting = updated_voting_state;
}
}
}
self.inputs = new_input;
Ok(())
}
GameStatus::Playing(PlayState::TallyingVotes(_))
| GameStatus::Playing(PlayState::ViewingOutcome(_))
| GameStatus::Connecting
| GameStatus::Won(_)
| GameStatus::Disconnected => {
// Nothing to do
Ok(())
}
}
}
fn take_night_input(&mut self, new_input: InputState) -> Result<(), String> {
let current_input = self.inputs;
let player = match self.local_player_mut() {
None => return Ok(()),
Some(p) => p,
};
if new_input == current_input {
return Ok(()); // quick exit for the boring case
}
// Read the parts of the local player that we care about.
let is_killing = player.impostor && !current_input.kill && new_input.kill;
let position = player.position;
let activating = !current_input.activate && new_input.activate;
let reporting = !current_input.report && new_input.report;
let starting_play =
self.state.status == GameStatus::Lobby && !current_input.play && new_input.play;
self.inputs = new_input;
// ok, we're done touching player at this point. we redeclare it
// below so we can use it again, next time mutably.
if is_killing {
self.kill_player_near(position)?;
}
if activating {
self.activate_near(position)?;
}
if starting_play {
self.start()?;
}
if reporting {
self.report_body_near(position)?;
}
let speed_changed: bool;
{
let new_speed = self.get_speed();
let player = self.local_player_mut().unwrap();
speed_changed = new_speed != player.velocity;
player.velocity = new_speed;
}
// This way we don't send a MoveMessage unless movement keys actually changed,
// reducing data leakage to HAXXORZ.
if speed_changed {
let player = self.local_player().unwrap();
self.socket.send(&ClientToServerMessage::Move(MoveMessage {
speed: player.velocity,
position: player.position,
}))?;
}
Ok(())
}
fn take_day_input(
&self,
day_state: &VotingState,
new_input: InputState,
) -> Result<Option<VotingUiState>, String> {
let pressed = self.inputs.get_new_presses(new_input);
let player = match self.local_player() {
None => {
// Spectators don't get a vote.
return Ok(None);
}
Some(p) => p,
};
if player.dead {
// The dead don't get a vote.
return Ok(None);
}
let has_voted = day_state.votes.contains_key(&player.uuid);
if has_voted {
// Nothing to do but wait if you've already voted.
return Ok(None);
}
let mut voting_state = match self.contextual_state {
ContextualState::Voting(voting) => voting,
ContextualState::Blank => {
return Err(
"Internal Error: expected to be in Voting contextual state during the day.".to_string(),
)
}
};
let mut vote_targets: Vec<TargetInVotingTable> = self
.state
.players
.iter()
.enumerate()
.filter(|(_, (_, p))| !p.dead)
.map(|(idx, (uuid, _p))| TargetInVotingTable::new(idx, VoteTarget::Player { uuid: *uuid }))
.collect();
vote_targets.push(TargetInVotingTable::new(10, VoteTarget::Skip));
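// The "skip" option is pinned at table index 10, i.e. column 0, row 5.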
match voting_state.highlighted_player {
None => {
if pressed.up || pressed.down || pressed.left || pressed.right {
// Nothing was highlighted, so highlight the first target player.
voting_state.highlighted_player = vote_targets.first().map(|vt| vt.target);
}
}
Some(highlighted) => {
let mut highlighted: TargetInVotingTable = *vote_targets
.iter()
.find(|vt| vt.target == highlighted)
.ok_or_else(|| "Internal Error: Highlighting a nonexistant player?".to_string())?;
if pressed.up {
let mut closest_same_column_above: Option<TargetInVotingTable> = None;
let mut closest_above: Option<TargetInVotingTable> = None;
for p in vote_targets.iter() {
if p.y >= highlighted.y {
break; // no longer above
}
if p.x == highlighted.x {
closest_same_column_above = Some(*p);
} else {
closest_above = Some(*p);
}
}
highlighted =
closest_same_column_above.unwrap_or_else(|| closest_above.unwrap_or(highlighted));
}
if pressed.down {
let mut closest_same_column_below: Option<TargetInVotingTable> = None;
let mut closest_below: Option<TargetInVotingTable> = None;
for p in vote_targets.iter() {
if p.y <= highlighted.y {
continue; // not below
}
if p.x == highlighted.x && closest_same_column_below.is_none() {
closest_same_column_below = Some(*p);
} else if closest_below.is_none() {
closest_below = Some(*p);
}
}
highlighted =
closest_same_column_below.unwrap_or_else(|| closest_below.unwrap_or(highlighted));
}
if pressed.left && highlighted.x == 1 {
let mut closest_left_column_above: Option<TargetInVotingTable> = None;
let mut first_in_left_column: Option<TargetInVotingTable> = None;
for p in vote_targets.iter() {
if p.x != 0 {
continue; // not in left column
}
if p.y <= highlighted.y {
closest_left_column_above = Some(*p);
} else if first_in_left_column.is_none() {
first_in_left_column = Some(*p);
}
}
highlighted = closest_left_column_above
.unwrap_or_else(|| first_in_left_column.unwrap_or(highlighted));
}
if pressed.right && highlighted.x == 0 {
let mut closest_right_column_above: Option<TargetInVotingTable> = None;
let mut first_in_right_column: Option<TargetInVotingTable> = None;
for p in vote_targets.iter() {
if p.x != 1 {
continue; // not in right column
}
if p.y <= highlighted.y {
closest_right_column_above = Some(*p);
} else if first_in_right_column.is_none() {
first_in_right_column = Some(*p);
}
}
highlighted = closest_right_column_above
.unwrap_or_else(|| first_in_right_column.unwrap_or(highlighted));
}
voting_state.highlighted_player = Some(highlighted.target);
}
}
if pressed.activate {
if let Some(target) = voting_state.highlighted_player {
self.socket.send(&ClientToServerMessage::Vote { target })?;
voting_state.highlighted_player = None;
}
}
Ok(Some(voting_state))
}
fn get_speed(&self) -> Velocity {
let mut dx = 0.0;
let mut dy = 0.0;
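// Note: holding two perpendicular keys sets both dx and dy to full speed, so
// diagonal movement is sqrt(2) times faster (not normalized).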
if self.inputs.up && !self.inputs.down {
dy = -self.state.settings.speed
} else if self.inputs.down {
dy = self.state.settings.speed
}
if self.inputs.left && !self.inputs.right {
dx = -self.state.settings.speed
} else if self.inputs.right {
dx = self.state.settings.speed
}
Velocity { dx, dy }
}
fn kill_player_near(&mut self, position: Position) -> Result<(), String> {
let mut killed_player: Option<DeadBody> = None;
let mut closest_distance = self.state.settings.kill_distance;
for (_, player) in self.state.players.iter_mut() {
if player.impostor || player.uuid == self.my_uuid || player.dead {
continue;
}
let distance = position.distance(&player.position);
if distance < closest_distance {
killed_player = Some(DeadBody {
position: player.position,
color: player.color,
});
closest_distance = distance;
}
}
if let Some(body) = killed_player {
self.state.note_death(body)?;
self.socket.send(&ClientToServerMessage::Killed(body))?;
// Move the killer on top of the new body.
if let Some(player) = self.local_player_mut() {
player.position = body.position;
}
}
Ok(())
}
fn activate_near(&mut self, position: Position) -> Result<(), String> {
let mut closest_distance = self.state.settings.task_distance;
let local_player = match self.local_player_mut() {
Some(player) => player,
None => return Ok(()),
};
let is_imp = local_player.impostor;
let mut finished_task: Option<FinishedTask> = None;
for (index, task) in local_player.tasks.iter().enumerate() {
let distance = position.distance(&task.position);
if distance < closest_distance {
finished_task = Some(FinishedTask { index });
closest_distance = distance;
}
}
if let Some(finished_task) = finished_task {
if !is_imp {
self.state.note_finished_task(self.my_uuid, finished_task)?;
self
.socket
.send(&ClientToServerMessage::FinishedTask(finished_task))?;
}
}
Ok(())
}
fn report_body_near(&mut self, position: Position) -> Result<(), String> {
let mut closest_distance = self.state.settings.report_distance;
let mut nearest_body_color: Option<Color> = None;
for body in self.state.bodies.iter() {
let distance = position.distance(&body.position);
if distance < closest_distance {
nearest_body_color = Some(body.color);
closest_distance = distance;
}
}
if let Some(color) = nearest_body_color {
self.socket.send(&ClientToServerMessage::ReportBody {
dead_body_color: color,
})?;
}
Ok(())
}
pub fn disconnected(&mut self) -> Result<(), String> {
match self.state.status {
GameStatus::Won(_) => (), // do nothing, this is expected
_ => self.update_status(GameStatus::Disconnected),
};
Ok(())
}
// Returns the distance that this client should be able to see.
// Returns None if they should see everything.
pub fn vision(&self) -> Option<f64> {
self
.local_player()
.map(|p| p.vision(&self.state.settings, &self.state.status))
.flatten()
}
// Returns whether this client is for a player that won.
// Returns None if the client is a spectator.
pub fn has_won(&self, winning_team: &Team) -> Option<bool> {
match self.local_player() {
None => None,
Some(p) => {
let impostor_won = match winning_team {
Team::Crew => false,
Team::Impostors => true,
};
Some(p.impostor == impostor_won)
}
}
}
pub fn handle_msg(&mut self, message: ServerToClientMessage) -> Result<(), String> {
console_log!("Player handling message: {}", message.kind());
match message {
ServerToClientMessage::Welcome {
connection_id: uuid,
} => {
self.my_uuid = uuid;
}
ServerToClientMessage::Snapshot(Snapshot {
status,
bodies,
players,
}) => {
self.update_status(status);
self.state.bodies = bodies;
// handle disconnections
let server_uuids: BTreeSet<_> = players.iter().map(|p| p.uuid).collect();
let local_uuids: BTreeSet<_> = self.state.players.iter().map(|(u, _)| *u).collect();
for uuid in local_uuids.difference(&server_uuids) {
self.state.players.remove(uuid);
}
for player in players {
match self.state.players.get_mut(&player.uuid) {
None => {
self.state.players.insert(player.uuid, player);
}
Some(local_player) => {
let Player {
name,
uuid: _uuid,
color,
dead,
impostor,
tasks,
position,
velocity: speed,
} = player;
local_player.name = name;
local_player.color = color;
local_player.dead = dead;
local_player.impostor = impostor;
local_player.tasks = tasks;
// Always trust our local speed over the server
if player.uuid != self.my_uuid {
local_player.velocity = speed;
}
// Avoid jitter by ignoring position updates (and instead using local dead
// reckoning based on speeds) unless the distance is greater than some small amount.
if position.distance(&local_player.position) > 30.0 {
local_player.position = position;
}
}
}
}
}
ServerToClientMessage::Replay(_recorded_game) => {
// Nothing to handle here. The JS client handles this itself.
}
ServerToClientMessage::DisplayMessage(display_message) => {
self.displayed_messages.push(display_message);
}
} | }
fn start(&mut self) -> Result<(), String> {
self.socket.send(&ClientToServerMessage::StartGame())?;
Ok(())
}
fn update_status(&mut self, new_status: GameStatus) {
if !self.state.status.is_same_kind(&new_status) {
self.inputs = InputState::default();
}
if let GameStatus::Playing(PlayState::Voting(_)) = new_status {
match self.contextual_state {
ContextualState::Voting(_) => (),
_ => self.contextual_state = ContextualState::Voting(VotingUiState::default()),
}
} else {
match self.contextual_state {
ContextualState::Blank => (),
_ => self.contextual_state = ContextualState::Blank,
}
}
self.state.status = new_status;
}
}
// This is terrible design lol. Integrate with game.status maybe?
pub enum ContextualState {
Blank,
Voting(VotingUiState),
}
#[derive(Default, Debug, Copy, Clone)]
pub struct VotingUiState {
pub highlighted_player: Option<VoteTarget>,
}
pub trait GameTx {
fn send(&self, message: &ClientToServerMessage) -> Result<(), String>;
}
#[derive(Clone, Copy)]
struct TargetInVotingTable {
x: usize,
y: usize,
target: VoteTarget,
}
impl TargetInVotingTable {
fn new(index: usize, target: VoteTarget) -> Self {
TargetInVotingTable {
x: index % 2,
y: index / 2,
target,
}
}
} | Ok(()) |
dom_character_data.rs | // This file was generated by gir (https://github.com/gtk-rs/gir)
// from gir-files (https://github.com/gtk-rs/gir-files.git)
// DO NOT EDIT
use crate::DOMEventTarget;
use crate::DOMNode;
use crate::DOMObject;
use glib::object::Cast;
use glib::object::IsA;
use glib::signal::connect_raw;
use glib::signal::SignalHandlerId;
use glib::translate::*;
use std::boxed::Box as Box_;
use std::fmt;
use std::mem::transmute;
use std::ptr;
glib::wrapper! {
pub struct DOMCharacterData(Object<ffi::WebKitDOMCharacterData, ffi::WebKitDOMCharacterDataClass>) @extends DOMNode, DOMObject, @implements DOMEventTarget;
match fn {
type_ => || ffi::webkit_dom_character_data_get_type(),
}
}
pub const NONE_DOM_CHARACTER_DATA: Option<&DOMCharacterData> = None;
pub trait DOMCharacterDataExt: 'static {
#[cfg_attr(feature = "v2_22", deprecated)]
#[doc(alias = "webkit_dom_character_data_append_data")]
fn append_data(&self, data: &str) -> Result<(), glib::Error>;
#[cfg_attr(feature = "v2_22", deprecated)]
#[doc(alias = "webkit_dom_character_data_delete_data")]
fn delete_data(&self, offset: libc::c_ulong, length: libc::c_ulong) -> Result<(), glib::Error>;
#[cfg_attr(feature = "v2_22", deprecated)]
#[doc(alias = "webkit_dom_character_data_get_data")]
fn data(&self) -> Option<glib::GString>;
#[cfg_attr(feature = "v2_22", deprecated)]
#[doc(alias = "webkit_dom_character_data_get_length")]
fn length(&self) -> libc::c_ulong;
#[cfg_attr(feature = "v2_22", deprecated)]
#[doc(alias = "webkit_dom_character_data_insert_data")]
fn insert_data(&self, offset: libc::c_ulong, data: &str) -> Result<(), glib::Error>;
#[cfg_attr(feature = "v2_22", deprecated)]
#[doc(alias = "webkit_dom_character_data_replace_data")]
fn replace_data(
&self,
offset: libc::c_ulong,
length: libc::c_ulong,
data: &str,
) -> Result<(), glib::Error>;
#[cfg_attr(feature = "v2_22", deprecated)]
#[doc(alias = "webkit_dom_character_data_set_data")]
fn set_data(&self, value: &str) -> Result<(), glib::Error>;
#[cfg_attr(feature = "v2_22", deprecated)]
#[doc(alias = "webkit_dom_character_data_substring_data")]
fn substring_data(
&self,
offset: libc::c_ulong,
length: libc::c_ulong,
) -> Result<glib::GString, glib::Error>;
fn connect_property_data_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
fn connect_property_length_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
}
impl<O: IsA<DOMCharacterData>> DOMCharacterDataExt for O {
fn append_data(&self, data: &str) -> Result<(), glib::Error> {
unsafe {
let mut error = ptr::null_mut();
let _ = ffi::webkit_dom_character_data_append_data(
self.as_ref().to_glib_none().0,
data.to_glib_none().0,
&mut error,
);
if error.is_null() {
Ok(())
} else {
Err(from_glib_full(error))
}
}
}
fn delete_data(&self, offset: libc::c_ulong, length: libc::c_ulong) -> Result<(), glib::Error> {
unsafe {
let mut error = ptr::null_mut();
let _ = ffi::webkit_dom_character_data_delete_data(
self.as_ref().to_glib_none().0,
offset,
length,
&mut error,
);
if error.is_null() {
Ok(())
} else {
Err(from_glib_full(error))
}
}
}
fn data(&self) -> Option<glib::GString> {
unsafe {
from_glib_full(ffi::webkit_dom_character_data_get_data(
self.as_ref().to_glib_none().0,
))
}
}
fn length(&self) -> libc::c_ulong {
unsafe { ffi::webkit_dom_character_data_get_length(self.as_ref().to_glib_none().0) }
}
fn insert_data(&self, offset: libc::c_ulong, data: &str) -> Result<(), glib::Error> {
unsafe {
let mut error = ptr::null_mut();
let _ = ffi::webkit_dom_character_data_insert_data(
self.as_ref().to_glib_none().0,
offset,
data.to_glib_none().0,
&mut error,
);
if error.is_null() {
Ok(())
} else {
Err(from_glib_full(error))
}
}
}
fn replace_data(
&self,
offset: libc::c_ulong,
length: libc::c_ulong,
data: &str,
) -> Result<(), glib::Error> {
unsafe {
let mut error = ptr::null_mut();
let _ = ffi::webkit_dom_character_data_replace_data(
self.as_ref().to_glib_none().0,
offset,
length,
data.to_glib_none().0,
&mut error,
);
if error.is_null() {
Ok(())
} else {
Err(from_glib_full(error))
}
}
}
fn set_data(&self, value: &str) -> Result<(), glib::Error> {
unsafe {
let mut error = ptr::null_mut();
let _ = ffi::webkit_dom_character_data_set_data(
self.as_ref().to_glib_none().0,
value.to_glib_none().0,
&mut error,
);
if error.is_null() {
Ok(())
} else {
Err(from_glib_full(error))
}
}
}
fn substring_data(
&self,
offset: libc::c_ulong,
length: libc::c_ulong,
) -> Result<glib::GString, glib::Error> {
unsafe {
let mut error = ptr::null_mut();
let ret = ffi::webkit_dom_character_data_substring_data(
self.as_ref().to_glib_none().0,
offset,
length,
&mut error,
);
if error.is_null() {
Ok(from_glib_full(ret))
} else {
Err(from_glib_full(error))
}
}
}
fn connect_property_data_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn notify_data_trampoline<P, F: Fn(&P) + 'static>(
this: *mut ffi::WebKitDOMCharacterData,
_param_spec: glib::ffi::gpointer,
f: glib::ffi::gpointer,
) where
P: IsA<DOMCharacterData>,
{ | let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::data\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_data_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
fn connect_property_length_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn notify_length_trampoline<P, F: Fn(&P) + 'static>(
this: *mut ffi::WebKitDOMCharacterData,
_param_spec: glib::ffi::gpointer,
f: glib::ffi::gpointer,
) where
P: IsA<DOMCharacterData>,
{
let f: &F = &*(f as *const F);
f(&DOMCharacterData::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::length\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_length_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
}
impl fmt::Display for DOMCharacterData {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str("DOMCharacterData")
}
} | let f: &F = &*(f as *const F);
f(&DOMCharacterData::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe { |
splunk_hec.rs | use crate::{
config::{DataType, SinkConfig, SinkContext, SinkDescription},
event::{self, Event, LogEvent, Value},
internal_events::{
SplunkEventEncodeError, SplunkEventSent, SplunkSourceMissingKeys,
SplunkSourceTypeMissingKeys,
},
sinks::util::{
encoding::{EncodingConfigWithDefault, EncodingConfiguration},
http::{BatchedHttpSink, HttpClient, HttpSink},
BatchConfig, BatchSettings, Buffer, Compression, InFlightLimit, TowerRequestConfig,
},
template::Template,
tls::{TlsOptions, TlsSettings},
};
use futures::{FutureExt, TryFutureExt};
use futures01::Sink;
use http::{Request, StatusCode, Uri};
use hyper::Body;
use lazy_static::lazy_static;
use serde::{Deserialize, Serialize};
use serde_json::json;
use snafu::{ResultExt, Snafu};
use std::convert::TryFrom;
use string_cache::DefaultAtom as Atom;
#[derive(Debug, Snafu)]
pub enum BuildError {
#[snafu(display("Host must include a scheme (https:// or http://)"))]
UriMissingScheme,
}
#[derive(Deserialize, Serialize, Debug, Clone, Default)]
#[serde(deny_unknown_fields)]
pub struct HecSinkConfig {
pub token: String,
// Deprecated name
#[serde(alias = "host")]
pub endpoint: String,
#[serde(default = "default_host_key")]
pub host_key: Atom,
#[serde(default)]
pub indexed_fields: Vec<Atom>,
pub index: Option<String>,
pub sourcetype: Option<Template>,
pub source: Option<Template>,
#[serde(
skip_serializing_if = "crate::serde::skip_serializing_if_default",
default
)]
pub encoding: EncodingConfigWithDefault<Encoding>,
#[serde(default)]
pub compression: Compression,
#[serde(default)]
pub batch: BatchConfig,
#[serde(default)]
pub request: TowerRequestConfig,
pub tls: Option<TlsOptions>,
}
lazy_static! {
static ref REQUEST_DEFAULTS: TowerRequestConfig = TowerRequestConfig {
in_flight_limit: InFlightLimit::Fixed(10),
rate_limit_num: Some(10),
..Default::default()
};
}
#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq, Derivative)]
#[serde(rename_all = "snake_case")]
#[derivative(Default)]
pub enum Encoding {
#[derivative(Default)]
Text,
Json,
}
fn default_host_key() -> Atom {
event::LogSchema::default().host_key().clone()
}
inventory::submit! {
SinkDescription::new::<HecSinkConfig>("splunk_hec")
}
#[typetag::serde(name = "splunk_hec")]
impl SinkConfig for HecSinkConfig {
fn build(&self, cx: SinkContext) -> crate::Result<(super::RouterSink, super::Healthcheck)> {
validate_host(&self.endpoint)?;
let batch = BatchSettings::default()
.bytes(bytesize::mib(1u64))
.timeout(1)
.parse_config(self.batch)?;
let request = self.request.unwrap_with(&REQUEST_DEFAULTS);
let tls_settings = TlsSettings::from_options(&self.tls)?;
let client = HttpClient::new(cx.resolver(), tls_settings)?;
let sink = BatchedHttpSink::new(
self.clone(),
Buffer::new(batch.size, self.compression),
request,
batch.timeout,
client.clone(),
cx.acker(),
)
.sink_map_err(|e| error!("Fatal splunk_hec sink error: {}", e));
let healthcheck = healthcheck(self.clone(), client).boxed().compat();
Ok((Box::new(sink), Box::new(healthcheck)))
}
fn input_type(&self) -> DataType {
DataType::Log
}
fn sink_type(&self) -> &'static str {
"splunk_hec"
}
}
#[async_trait::async_trait]
impl HttpSink for HecSinkConfig {
type Input = Vec<u8>;
type Output = Vec<u8>;
fn encode_event(&self, mut event: Event) -> Option<Self::Input> {
self.encoding.apply_rules(&mut event);
let sourcetype = self.sourcetype.as_ref().and_then(|sourcetype| {
sourcetype
.render_string(&event)
.map_err(|missing_keys| {
emit!(SplunkSourceTypeMissingKeys { keys: missing_keys });
})
.ok()
});
let source = self.source.as_ref().and_then(|source| {
source
.render_string(&event)
.map_err(|missing_keys| {
emit!(SplunkSourceMissingKeys { keys: missing_keys });
})
.ok()
});
let mut event = event.into_log();
let host = event.get(&self.host_key).cloned();
let timestamp = match event.remove(&event::log_schema().timestamp_key()) {
Some(Value::Timestamp(ts)) => ts,
_ => chrono::Utc::now(),
};
let timestamp = (timestamp.timestamp_millis() as f64) / 1000f64;
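// HEC expects `time` in epoch seconds; dividing milliseconds by 1000 keeps
// millisecond precision in the fractional part.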
let fields = self
.indexed_fields
.iter()
.filter_map(|field| event.get(field).map(|value| (field, value.clone())))
.collect::<LogEvent>();
let event = match self.encoding.codec() {
Encoding::Json => json!(event),
Encoding::Text => json!(event
.get(&event::log_schema().message_key())
.map(|v| v.to_string_lossy())
.unwrap_or_else(|| "".into())),
};
let mut body = json!({
"event": event,
"fields": fields,
"time": timestamp
});
if let Some(host) = host {
let host = host.to_string_lossy();
body["host"] = json!(host);
}
if let Some(index) = &self.index {
body["index"] = json!(index);
}
if let Some(source) = source {
body["source"] = json!(source);
}
if let Some(sourcetype) = &sourcetype {
body["sourcetype"] = json!(sourcetype);
}
match serde_json::to_vec(&body) {
Ok(value) => {
emit!(SplunkEventSent {
byte_size: value.len()
});
Some(value)
}
Err(e) => {
emit!(SplunkEventEncodeError { error: e });
None
}
}
}
async fn build_request(&self, events: Self::Output) -> crate::Result<Request<Vec<u8>>> {
let uri =
build_uri(&self.endpoint, "/services/collector/event").expect("Unable to parse URI");
let mut builder = Request::post(uri)
.header("Content-Type", "application/json")
.header("Authorization", format!("Splunk {}", self.token));
if let Some(ce) = self.compression.content_encoding() {
builder = builder.header("Content-Encoding", ce);
}
builder.body(events).map_err(Into::into)
}
}
#[derive(Debug, Snafu)]
enum HealthcheckError {
#[snafu(display("Invalid HEC token"))]
InvalidToken,
#[snafu(display("Queues are full"))]
QueuesFull,
}
pub async fn healthcheck(config: HecSinkConfig, mut client: HttpClient) -> crate::Result<()> {
let uri = build_uri(&config.endpoint, "/services/collector/health/1.0")
.context(super::UriParseError)?;
let request = Request::get(uri)
.header("Authorization", format!("Splunk {}", config.token))
.body(Body::empty())
.unwrap();
let response = client.send(request).await?;
match response.status() {
StatusCode::OK => Ok(()),
StatusCode::BAD_REQUEST => Err(HealthcheckError::InvalidToken.into()),
StatusCode::SERVICE_UNAVAILABLE => Err(HealthcheckError::QueuesFull.into()),
other => Err(super::HealthcheckError::UnexpectedStatus { status: other }.into()),
}
}
pub fn validate_host(host: &str) -> crate::Result<()> {
let uri = Uri::try_from(host).context(super::UriParseError)?;
match uri.scheme() {
Some(_) => Ok(()),
None => Err(Box::new(BuildError::UriMissingScheme)),
}
}
fn build_uri(host: &str, path: &str) -> Result<Uri, http::uri::InvalidUri> {
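// Trim trailing slashes from the host so joining with an absolute path never
// produces a double slash.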
format!("{}{}", host.trim_end_matches('/'), path).parse::<Uri>()
}
#[cfg(test)]
mod tests {
use super::*;
use crate::event::Event;
use crate::sinks::util::{http::HttpSink, test::load_sink};
use chrono::Utc;
use serde::Deserialize;
use std::collections::BTreeMap;
#[derive(Deserialize, Debug)]
struct HecEventJson {
time: f64,
event: BTreeMap<String, String>,
fields: BTreeMap<String, String>,
}
#[derive(Deserialize, Debug)]
struct HecEventText {
time: f64,
event: String,
fields: BTreeMap<String, String>,
}
#[test]
fn splunk_encode_event_json() {
let mut event = Event::from("hello world");
event.as_mut_log().insert("key", "value");
let (config, _cx) = load_sink::<HecSinkConfig>(
r#"
host = "test.com"
token = "alksjdfo"
host_key = "host"
indexed_fields = ["key"]
[encoding]
codec = "json"
"#,
)
.unwrap();
let bytes = config.encode_event(event).unwrap();
let hec_event = serde_json::from_slice::<HecEventJson>(&bytes[..]).unwrap();
let event = &hec_event.event;
let kv = event.get(&"key".to_string()).unwrap();
assert_eq!(kv, &"value".to_string());
assert_eq!(
event[&event::log_schema().message_key().to_string()],
"hello world".to_string()
);
assert!(event
.get(&event::log_schema().timestamp_key().to_string())
.is_none());
assert_eq!(
hec_event.fields.get("key").map(|s| s.as_str()),
Some("value")
);
let now = Utc::now().timestamp_millis() as f64 / 1000f64;
assert!(
(hec_event.time - now).abs() < 0.2,
format!("hec_event.time = {}, now = {}", hec_event.time, now)
);
assert_eq!((hec_event.time * 1000f64).fract(), 0f64);
}
#[test]
fn splunk_encode_event_text() {
let mut event = Event::from("hello world");
event.as_mut_log().insert("key", "value");
let (config, _cx) = load_sink::<HecSinkConfig>(
r#"
host = "test.com"
token = "alksjdfo"
host_key = "host"
indexed_fields = ["key"]
[encoding]
codec = "text"
"#,
)
.unwrap();
let bytes = config.encode_event(event).unwrap();
let hec_event = serde_json::from_slice::<HecEventText>(&bytes[..]).unwrap();
assert_eq!(hec_event.event.as_str(), "hello world");
assert_eq!(
hec_event.fields.get("key").map(|s| s.as_str()),
Some("value")
);
let now = Utc::now().timestamp_millis() as f64 / 1000f64;
assert!(
(hec_event.time - now).abs() < 0.2,
format!("hec_event.time = {}, now = {}", hec_event.time, now)
);
assert_eq!((hec_event.time * 1000f64).fract(), 0f64);
}
#[test]
fn splunk_validate_host() {
let valid = "http://localhost:8888".to_string();
let invalid_scheme = "localhost:8888".to_string();
let invalid_uri = "iminvalidohnoes".to_string();
assert!(validate_host(&valid).is_ok());
assert!(validate_host(&invalid_scheme).is_err());
assert!(validate_host(&invalid_uri).is_err());
}
#[test]
fn splunk_build_uri() {
let uri = build_uri("http://test.com/", "/a");
assert!(uri.is_ok());
assert_eq!(format!("{}", uri.unwrap()), "http://test.com/a");
}
}
#[cfg(test)]
#[cfg(feature = "splunk-integration-tests")]
mod integration_tests {
use super::*;
use crate::test_util::wait_for_tcp_duration;
use crate::{
assert_downcast_matches,
config::{SinkConfig, SinkContext},
sinks,
test_util::{random_lines_with_stream, random_string},
Event,
};
use futures::{
compat::{Future01CompatExt, Sink01CompatExt},
SinkExt,
};
use futures01::Sink;
use serde_json::Value as JsonValue;
use std::net::SocketAddr;
use tokio::time::{delay_for, Duration};
use warp::Filter;
const USERNAME: &str = "admin";
const PASSWORD: &str = "password"; | // we see it.
async fn find_entry(message: &str) -> serde_json::value::Value {
for _ in 0..20usize {
match recent_entries(None)
.await
.into_iter()
.find(|entry| entry["_raw"].as_str().unwrap_or("").contains(&message))
{
Some(value) => return value,
None => std::thread::sleep(std::time::Duration::from_millis(100)),
}
}
panic!("Didn't find event in Splunk");
}
#[tokio::test]
async fn splunk_insert_message() {
let cx = SinkContext::new_test();
let config = config(Encoding::Text, vec![]).await;
let (sink, _) = config.build(cx).unwrap();
let message = random_string(100);
let event = Event::from(message.clone());
sink.send(event).compat().await.unwrap();
let entry = find_entry(message.as_str()).await;
assert_eq!(message, entry["_raw"].as_str().unwrap());
assert!(entry.get("message").is_none());
}
#[tokio::test]
async fn splunk_insert_source() {
let cx = SinkContext::new_test();
let mut config = config(Encoding::Text, vec![]).await;
config.source = Template::try_from("/var/log/syslog".to_string()).ok();
let (sink, _) = config.build(cx).unwrap();
let message = random_string(100);
let event = Event::from(message.clone());
sink.send(event).compat().await.unwrap();
let entry = find_entry(message.as_str()).await;
assert_eq!(entry["source"].as_str(), Some("/var/log/syslog"));
}
#[tokio::test]
async fn splunk_insert_index() {
let cx = SinkContext::new_test();
let mut config = config(Encoding::Text, vec![]).await;
config.index = Some("custom_index".to_string());
let (sink, _) = config.build(cx).unwrap();
let message = random_string(100);
let event = Event::from(message.clone());
sink.send(event).compat().await.unwrap();
let entry = find_entry(message.as_str()).await;
assert_eq!(entry["index"].as_str().unwrap(), "custom_index");
}
#[tokio::test]
async fn splunk_insert_many() {
let cx = SinkContext::new_test();
let config = config(Encoding::Text, vec![]).await;
let (sink, _) = config.build(cx).unwrap();
let (messages, mut events) = random_lines_with_stream(100, 10);
let _ = sink.sink_compat().send_all(&mut events).await.unwrap();
let mut found_all = false;
for _ in 0..20 {
let entries = recent_entries(None).await;
found_all = messages.iter().all(|message| {
entries
.iter()
.any(|entry| entry["_raw"].as_str().unwrap() == message)
});
if found_all {
break;
}
delay_for(Duration::from_millis(100)).await;
}
assert!(found_all);
}
#[tokio::test]
async fn splunk_custom_fields() {
let cx = SinkContext::new_test();
let indexed_fields = vec![Atom::from("asdf")];
let config = config(Encoding::Json, indexed_fields).await;
let (sink, _) = config.build(cx).unwrap();
let message = random_string(100);
let mut event = Event::from(message.clone());
event.as_mut_log().insert("asdf", "hello");
sink.send(event).compat().await.unwrap();
let entry = find_entry(message.as_str()).await;
assert_eq!(message, entry["message"].as_str().unwrap());
let asdf = entry["asdf"].as_array().unwrap()[0].as_str().unwrap();
assert_eq!("hello", asdf);
}
#[tokio::test]
async fn splunk_hostname() {
let cx = SinkContext::new_test();
let indexed_fields = vec![Atom::from("asdf")];
let config = config(Encoding::Json, indexed_fields).await;
let (sink, _) = config.build(cx).unwrap();
let message = random_string(100);
let mut event = Event::from(message.clone());
event.as_mut_log().insert("asdf", "hello");
event.as_mut_log().insert("host", "example.com:1234");
sink.send(event).compat().await.unwrap();
let entry = find_entry(message.as_str()).await;
assert_eq!(message, entry["message"].as_str().unwrap());
let asdf = entry["asdf"].as_array().unwrap()[0].as_str().unwrap();
assert_eq!("hello", asdf);
let host = entry["host"].as_array().unwrap()[0].as_str().unwrap();
assert_eq!("example.com:1234", host);
}
#[tokio::test]
async fn splunk_sourcetype() {
let cx = SinkContext::new_test();
let indexed_fields = vec![Atom::from("asdf")];
let mut config = config(Encoding::Json, indexed_fields).await;
config.sourcetype = Template::try_from("_json".to_string()).ok();
let (sink, _) = config.build(cx).unwrap();
let message = random_string(100);
let mut event = Event::from(message.clone());
event.as_mut_log().insert("asdf", "hello");
sink.send(event).compat().await.unwrap();
let entry = find_entry(message.as_str()).await;
assert_eq!(message, entry["message"].as_str().unwrap());
let asdf = entry["asdf"].as_array().unwrap()[0].as_str().unwrap();
assert_eq!("hello", asdf);
let sourcetype = entry["sourcetype"].as_str().unwrap();
assert_eq!("_json", sourcetype);
}
#[tokio::test]
async fn splunk_configure_hostname() {
let cx = SinkContext::new_test();
let config = super::HecSinkConfig {
host_key: "roast".into(),
..config(Encoding::Json, vec![Atom::from("asdf")]).await
};
let (sink, _) = config.build(cx).unwrap();
let message = random_string(100);
let mut event = Event::from(message.clone());
event.as_mut_log().insert("asdf", "hello");
event.as_mut_log().insert("host", "example.com:1234");
event.as_mut_log().insert("roast", "beef.example.com:1234");
sink.send(event).compat().await.unwrap();
let entry = find_entry(message.as_str()).await;
assert_eq!(message, entry["message"].as_str().unwrap());
let asdf = entry["asdf"].as_array().unwrap()[0].as_str().unwrap();
assert_eq!("hello", asdf);
let host = entry["host"].as_array().unwrap()[0].as_str().unwrap();
assert_eq!("beef.example.com:1234", host);
}
#[tokio::test]
async fn splunk_healthcheck() {
let resolver = crate::dns::Resolver;
let config_to_healthcheck = move |config: super::HecSinkConfig| {
let tls_settings = TlsSettings::from_options(&config.tls).unwrap();
let client = HttpClient::new(resolver, tls_settings).unwrap();
sinks::splunk_hec::healthcheck(config, client)
};
// OK
{
let config = config(Encoding::Text, vec![]).await;
let healthcheck = config_to_healthcheck(config);
healthcheck.await.unwrap();
}
// Server not listening at address
{
let config = HecSinkConfig {
endpoint: "http://localhost:1111".to_string(),
..config(Encoding::Text, vec![]).await
};
let healthcheck = config_to_healthcheck(config);
healthcheck.await.unwrap_err();
}
// Invalid token
// The HEC REST docs claim that the healthcheck endpoint will validate the auth token,
// but my local testing server returns 200 even with a bad token.
// {
// let healthcheck = sinks::splunk::healthcheck(
// "wrong".to_string(),
// "http://localhost:8088".to_string(),
// )
// .unwrap();
// assert_eq!(rt.block_on(healthcheck).unwrap_err(), "Invalid HEC token");
// }
// Unhealthy server
{
let config = HecSinkConfig {
endpoint: "http://localhost:5503".to_string(),
..config(Encoding::Text, vec![]).await
};
let unhealthy = warp::any()
.map(|| warp::reply::with_status("i'm sad", StatusCode::SERVICE_UNAVAILABLE));
let server = warp::serve(unhealthy).bind("0.0.0.0:5503".parse::<SocketAddr>().unwrap());
tokio::spawn(server);
let healthcheck = config_to_healthcheck(config);
assert_downcast_matches!(
healthcheck.await.unwrap_err(),
HealthcheckError,
HealthcheckError::QueuesFull
);
}
}
async fn recent_entries(index: Option<&str>) -> Vec<JsonValue> {
let client = reqwest::Client::builder()
.danger_accept_invalid_certs(true)
.build()
.unwrap();
// https://docs.splunk.com/Documentation/Splunk/7.2.1/RESTREF/RESTsearch#search.2Fjobs
let search_query = match index {
Some(index) => format!("search index={}", index),
None => "search *".into(),
};
let res = client
.post("https://localhost:8089/services/search/jobs?output_mode=json")
.form(&vec![
("search", &search_query[..]),
("exec_mode", "oneshot"),
("f", "*"),
])
.basic_auth(USERNAME, Some(PASSWORD))
.send()
.await
.unwrap();
let json: JsonValue = res.json().await.unwrap();
println!("output: {:?}", json);
json["results"].as_array().unwrap().clone()
}
async fn config(
encoding: impl Into<EncodingConfigWithDefault<Encoding>>,
indexed_fields: Vec<Atom>,
) -> super::HecSinkConfig {
super::HecSinkConfig {
endpoint: "http://localhost:8088/".into(),
token: get_token().await,
host_key: "host".into(),
compression: Compression::None,
encoding: encoding.into(),
batch: BatchConfig {
max_events: Some(1),
..Default::default()
},
indexed_fields,
..Default::default()
}
}
async fn get_token() -> String {
let client = reqwest::Client::builder()
.danger_accept_invalid_certs(true)
.build()
.unwrap();
// Wait for port 8089 to be reachable before firing off request
wait_for_tcp_duration("127.0.0.1:8089".parse().unwrap(), Duration::from_secs(30)).await;
let res = client
.get("https://localhost:8089/services/data/inputs/http?output_mode=json")
.basic_auth(USERNAME, Some(PASSWORD))
.send()
.await
.unwrap();
let json: JsonValue = res.json().await.unwrap();
let entries = json["entry"].as_array().unwrap().clone();
if entries.is_empty() {
// TODO: create one automatically
panic!("You don't have any HTTP Event Collector inputs set up in Splunk");
}
entries[0]["content"]["token"].as_str().unwrap().to_owned()
}
} |
// It usually takes ~1 second for the event to show up in search, so poll until |
broadcast.go | /*
Copyright IBM Corp. 2016 All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package kafka
import (
"fmt"
"sync"
"time"
"github.com/hyperledger/fabric/orderer/config"
cb "github.com/hyperledger/fabric/protos/common"
ab "github.com/hyperledger/fabric/protos/orderer"
"github.com/golang/protobuf/proto"
)
// Broadcaster allows the caller to submit messages to the orderer
type Broadcaster interface {
Broadcast(stream ab.AtomicBroadcast_BroadcastServer) error
Closeable
}
type broadcasterImpl struct {
producer Producer
config *config.TopLevel
once sync.Once
batchChan chan *cb.Envelope
messages [][]byte
nextNumber uint64
prevHash []byte
}
func newBroadcaster(conf *config.TopLevel) Broadcaster {
return &broadcasterImpl{
producer: newProducer(conf),
config: conf,
batchChan: make(chan *cb.Envelope, conf.General.BatchSize),
messages: [][]byte{[]byte("genesis")},
nextNumber: 0,
}
}
// Broadcast receives ordering requests from clients and sends back an
// acknowledgement for each received message in order, indicating
// success or the type of failure.
func (b *broadcasterImpl) Broadcast(stream ab.AtomicBroadcast_BroadcastServer) error {
b.once.Do(func() {
// Send the genesis block to create the topic;
// otherwise consumers will throw an exception.
b.sendBlock()
// Spawn the goroutine that cuts blocks
go b.cutBlock(b.config.General.BatchTimeout, b.config.General.BatchSize)
})
return b.recvRequests(stream)
}
// Close shuts down the broadcast side of the orderer
func (b *broadcasterImpl) Close() error {
if b.producer != nil {
return b.producer.Close()
}
return nil
}
func (b *broadcasterImpl) sendBlock() error {
data := &cb.BlockData{
Data: b.messages,
}
block := &cb.Block{
Header: &cb.BlockHeader{
Number: b.nextNumber,
PreviousHash: b.prevHash,
DataHash: data.Hash(),
},
Data: data,
}
logger.Debugf("Prepared block %d with %d messages (%+v)", block.Header.Number, len(block.Data.Data), block)
b.messages = [][]byte{}
b.nextNumber++
b.prevHash = block.Header.Hash()
blockBytes, err := proto.Marshal(block)
if err != nil {
logger.Fatalf("Error marshaling block: %s", err)
}
return b.producer.Send(blockBytes)
}
func (b *broadcasterImpl) cutBlock(period time.Duration, maxSize uint) {
timer := time.NewTimer(period)
for {
select {
case msg := <-b.batchChan:
data, err := proto.Marshal(msg)
if err != nil {
logger.Fatalf("Error marshaling what should be a valid proto message: %s", err)
}
b.messages = append(b.messages, data)
if len(b.messages) >= int(maxSize) { | if !timer.Stop() {
<-timer.C
}
timer.Reset(period)
if err := b.sendBlock(); err != nil {
panic(fmt.Errorf("Cannot communicate with Kafka broker: %s", err))
}
}
case <-timer.C:
timer.Reset(period)
if len(b.messages) > 0 {
if err := b.sendBlock(); err != nil {
panic(fmt.Errorf("Cannot communicate with Kafka broker: %s", err))
}
}
}
}
}
func (b *broadcasterImpl) recvRequests(stream ab.AtomicBroadcast_BroadcastServer) error {
reply := new(ab.BroadcastResponse)
for {
msg, err := stream.Recv()
if err != nil {
logger.Debug("Can no longer receive requests from client (exited?)")
return err
}
b.batchChan <- msg
reply.Status = cb.Status_SUCCESS // TODO This shouldn't always be a success
if err := stream.Send(reply); err != nil {
logger.Info("Cannot send broadcast reply to client")
return err
}
logger.Debugf("Sent broadcast reply %v to client", reply.Status.String())
}
} | |
ICompMint__factory.ts | /* Autogenerated file. Do not edit manually. */
/* tslint:disable */
/* eslint-disable */
import { Contract, Signer } from "ethers";
import { Provider } from "@ethersproject/providers";
import type { ICompMint } from "../ICompMint";
export class | {
static connect(
address: string,
signerOrProvider: Signer | Provider
): ICompMint {
return new Contract(address, _abi, signerOrProvider) as ICompMint;
}
}
const _abi = [
{
inputs: [
{
internalType: "uint256",
name: "id",
type: "uint256",
},
],
name: "mint",
outputs: [],
stateMutability: "nonpayable",
type: "function",
},
{
inputs: [
{
internalType: "uint256",
name: "id",
type: "uint256",
},
{
internalType: "uint256",
name: "deadline",
type: "uint256",
},
{
internalType: "uint8",
name: "v",
type: "uint8",
},
{
internalType: "bytes32",
name: "r",
type: "bytes32",
},
{
internalType: "bytes32",
name: "s",
type: "bytes32",
},
],
name: "mintWithPermit",
outputs: [],
stateMutability: "nonpayable",
type: "function",
},
];
| ICompMint__factory |
model_synthetics_test_pause_status.go | /*
* Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
* This product includes software developed at Datadog (https://www.datadoghq.com/).
* Copyright 2019-Present Datadog, Inc.
*/
// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT.
package datadog
import ( | )
// SyntheticsTestPauseStatus Define whether you want to start (`live`) or pause (`paused`) a Synthetic test.
type SyntheticsTestPauseStatus string
// List of SyntheticsTestPauseStatus
const (
SYNTHETICSTESTPAUSESTATUS_LIVE SyntheticsTestPauseStatus = "live"
SYNTHETICSTESTPAUSESTATUS_PAUSED SyntheticsTestPauseStatus = "paused"
)
var allowedSyntheticsTestPauseStatusEnumValues = []SyntheticsTestPauseStatus{
"live",
"paused",
}
func (w *SyntheticsTestPauseStatus) GetAllowedValues() []SyntheticsTestPauseStatus {
return allowedSyntheticsTestPauseStatusEnumValues
}
func (v *SyntheticsTestPauseStatus) UnmarshalJSON(src []byte) error {
var value string
err := json.Unmarshal(src, &value)
if err != nil {
return err
}
ev, err := NewSyntheticsTestPauseStatusFromValue(value)
if err != nil {
return err
}
*v = *ev
return nil
}
// NewSyntheticsTestPauseStatusFromValue returns a pointer to a valid SyntheticsTestPauseStatus
// for the value passed as argument, or an error if the value passed is not allowed by the enum
func NewSyntheticsTestPauseStatusFromValue(v string) (*SyntheticsTestPauseStatus, error) {
ev := SyntheticsTestPauseStatus(v)
if ev.IsValid() {
return &ev, nil
} else {
return nil, fmt.Errorf("invalid value '%v' for SyntheticsTestPauseStatus: valid values are %v", v, allowedSyntheticsTestPauseStatusEnumValues)
}
}
// IsValid return true if the value is valid for the enum, false otherwise
func (v SyntheticsTestPauseStatus) IsValid() bool {
for _, existing := range allowedSyntheticsTestPauseStatusEnumValues {
if existing == v {
return true
}
}
return false
}
// Ptr returns reference to SyntheticsTestPauseStatus value
func (v SyntheticsTestPauseStatus) Ptr() *SyntheticsTestPauseStatus {
return &v
}
type NullableSyntheticsTestPauseStatus struct {
value *SyntheticsTestPauseStatus
isSet bool
}
func (v NullableSyntheticsTestPauseStatus) Get() *SyntheticsTestPauseStatus {
return v.value
}
func (v *NullableSyntheticsTestPauseStatus) Set(val *SyntheticsTestPauseStatus) {
v.value = val
v.isSet = true
}
func (v NullableSyntheticsTestPauseStatus) IsSet() bool {
return v.isSet
}
func (v *NullableSyntheticsTestPauseStatus) Unset() {
v.value = nil
v.isSet = false
}
func NewNullableSyntheticsTestPauseStatus(val *SyntheticsTestPauseStatus) *NullableSyntheticsTestPauseStatus {
return &NullableSyntheticsTestPauseStatus{value: val, isSet: true}
}
func (v NullableSyntheticsTestPauseStatus) MarshalJSON() ([]byte, error) {
return json.Marshal(v.value)
}
func (v *NullableSyntheticsTestPauseStatus) UnmarshalJSON(src []byte) error {
v.isSet = true
return json.Unmarshal(src, &v.value)
} | "encoding/json"
"fmt" |
index.tsx | import React from "react";
import { History, Location } from "history";
import { FormControl } from "react-bootstrap";
import numeral from "numeral";
import { Global } from "../../store/global/types";
import { TrendingTags } from "../../store/trending-tags/types";
import BaseComponent from "../base";
import SearchBox from "../search-box";
import SearchSuggester from "../search-suggester";
import { _t } from "../../i18n";
import queryString from "query-string";
interface Props {
history: History;
location: Location;
global: Global;
trendingTags: TrendingTags;
fetchTrendingTags: () => void;
containerClassName?: string;
}
interface State {
query: string;
}
export class Search extends BaseComponent<Props, State> {
state: State = {
query: "",
};
componentDidMount() {
const { fetchTrendingTags } = this.props;
// fetchTrendingTags();
// this.grabSearchQuery();
}
componentDidUpdate(prevProps: Readonly<Props>): void {
const { location } = this.props;
if (location.pathname !== prevProps.location.pathname) {
this.stateSet({
query: "",
});
return;
}
// if (this.isSearchPage() && location.search !== prevProps.location.search) {
// this.grabSearchQuery();
// }
}
grabSearchQuery = () => {
const { location } = this.props;
if (this.isSearchPage()) {
const qs = queryString.parse(location.search);
const query = (qs.q as string) || "";
this.stateSet({ query });
}
};
isSearchPage = () => this.props.location.pathname.startsWith("/search");
queryChanged = ( | };
onKeyDown = (e: React.KeyboardEvent) => {
if (e.keyCode === 13) {
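// 13 = Enter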
const { history, location } = this.props;
const { query } = this.state;
if (["/search-more", "/search-more/"].includes(location.pathname)) {
history.push(`/search-more/?q=${encodeURIComponent(query)}`);
} else {
history.push(`/search/?q=${encodeURIComponent(query)}`);
}
}
};
render() {
const { global, containerClassName } = this.props;
const { query } = this.state;
const placeholder =
global.searchIndexCount > 0
? _t("search.placeholder-count", {
n: numeral(global.searchIndexCount).format("0,0"),
})
: _t("search.placeholder");
return (
<>
<SearchSuggester
{...this.props}
value={query}
containerClassName={containerClassName}
changed={true}
>
<SearchBox
placeholder={placeholder}
value={query}
onChange={this.queryChanged}
onKeyDown={this.onKeyDown}
autoComplete="off"
/>
</SearchSuggester>
</>
);
}
}
export default (p: Props) => {
const props: Props = {
history: p.history,
location: p.location,
global: p.global,
trendingTags: p.trendingTags,
fetchTrendingTags: p.fetchTrendingTags,
containerClassName: p.containerClassName,
};
return <Search {...props} />;
}; | e: React.ChangeEvent<typeof FormControl & HTMLInputElement>
) => {
const query = e.target.value.toLowerCase();
this.stateSet({ query }); |
gateways.go | // Copyright (c) 2012-2014 Jeremy Latt
// Copyright (c) 2014-2015 Edmund Huber
// Copyright (c) 2017 Daniel Oaks <[email protected]>
// released under the MIT license
package irc
import (
"errors"
"net"
|
var (
errBadGatewayAddress = errors.New("PROXY/WEBIRC commands are not accepted from this IP address")
errBadProxyLine = errors.New("Invalid PROXY/WEBIRC command")
)
const (
// https://www.haproxy.org/download/1.8/doc/proxy-protocol.txt
// "a 108-byte buffer is always enough to store all the line and a trailing zero
// for string processing."
maxProxyLineLen = 107
)
type webircConfig struct {
PasswordString string `yaml:"password"`
Password []byte `yaml:"password-bytes"`
Fingerprint *string // legacy name for certfp, #1050
Certfp string
Hosts []string
allowedNets []net.IPNet
}
// Populate fills out the password and certfp fields and parses the allowed host list.
func (wc *webircConfig) Populate() (err error) {
if wc.PasswordString != "" {
wc.Password, err = decodeLegacyPasswordHash(wc.PasswordString)
if err != nil {
return
}
}
certfp := wc.Certfp
if certfp == "" && wc.Fingerprint != nil {
certfp = *wc.Fingerprint
}
if certfp != "" {
wc.Certfp, err = utils.NormalizeCertfp(certfp)
}
if err != nil {
return
}
if wc.Certfp == "" && wc.PasswordString == "" {
return errors.New("webirc block has no certfp or password specified")
}
wc.allowedNets, err = utils.ParseNetList(wc.Hosts)
return err
}
// ApplyProxiedIP applies the given IP to the client.
func (client *Client) ApplyProxiedIP(session *Session, proxiedIP net.IP, tls bool) (err error, quitMsg string) {
// PROXY and WEBIRC are never accepted from a Tor listener, even if the address itself
// is whitelisted. Furthermore, don't accept PROXY or WEBIRC if we already accepted
// a proxied IP from any source (PROXY, WEBIRC, or X-Forwarded-For):
if session.isTor || session.proxiedIP != nil {
return errBadProxyLine, ""
}
// ensure IP is sane
if proxiedIP == nil {
return errBadProxyLine, "proxied IP is not valid"
}
proxiedIP = proxiedIP.To16()
isBanned, banMsg := client.server.checkBans(proxiedIP)
if isBanned {
return errBanned, banMsg
}
// successfully added a limiter entry for the proxied IP;
// remove the entry for the real IP if applicable (#197)
client.server.connectionLimiter.RemoveClient(session.realIP)
// given IP is sane! override the client's current IP
client.server.logger.Info("connect-ip", "Accepted proxy IP for client", proxiedIP.String())
client.stateMutex.Lock()
defer client.stateMutex.Unlock()
client.proxiedIP = proxiedIP
session.proxiedIP = proxiedIP
// nickmask will be updated when the client completes registration
// set tls info
session.certfp = ""
client.SetMode(modes.TLS, tls)
return nil, ""
}
// handle the PROXY command: http://www.haproxy.org/download/1.8/doc/proxy-protocol.txt
// PROXY must be sent as the first message in the session and has the syntax:
// PROXY TCP[46] SOURCEIP DESTIP SOURCEPORT DESTPORT\r\n
// unfortunately, an ipv6 SOURCEIP can start with a double colon; in this case,
// the message is invalid IRC and can't be parsed normally, hence the special handling.
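// A well-formed v1 line, instantiating the syntax above, looks like:
//   PROXY TCP4 203.0.113.7 192.0.2.1 56324 6667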
func handleProxyCommand(server *Server, client *Client, session *Session, line string) (err error) {
var quitMsg string
defer func() {
if err != nil {
if quitMsg == "" {
quitMsg = client.t("Bad or unauthorized PROXY command")
}
client.Quit(quitMsg, session)
}
}()
ip, err := utils.ParseProxyLine(line)
if err != nil {
return err
}
if utils.IPInNets(client.realIP, server.Config().Server.proxyAllowedFromNets) {
// assume PROXY connections are always secure
err, quitMsg = client.ApplyProxiedIP(session, ip, true)
return
} else {
// real source IP is not authorized to issue PROXY:
return errBadGatewayAddress
}
} | "github.com/oragono/oragono/irc/modes"
"github.com/oragono/oragono/irc/utils"
) |
PRESUBMIT.py | # Copyright 2018 the V8 project authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
def | (input_api, output_api):
tests = input_api.canned_checks.GetUnitTestsInDirectory(
input_api, output_api, 'unittests')
return input_api.RunTests(tests)
| CheckChangeOnCommit |
item.tsx | import React from 'react';
import AntForm, {FormItemProps} from 'antd/lib/form';
const AntFormItem = AntForm.Item; | const displayName = 'Form.Item';
const Item: React.FunctionComponent<FormItemProps> = ({children, ...props}) => (
<AntFormItem data-test={displayName} {...props}>
{children}
</AntFormItem>
);
Item.displayName = displayName;
export {Item};
export type {FormItemProps}; | |
PID.py | import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
class PID():
def __init__(self, P,I,D, bias):
self.P = P
self.I = I
self.D = D
self.bias = bias
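    # Usage sketch (assumes prop/inte/der methods computing the proportional,
    # integral and derivative terms; they are not shown here):
    #   pid = PID(P=1.0, I=0.1, D=0.05, bias=0.0)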
def | (self,data:np.array):
output = self.P*self.prop(data) + self.I*self.inte(data) + self.D*self.der(data) + self.bias
return output | adjust |
index.ts | export * from './findScrollParent'
export * from './getUserPlatform' |
||
MessagesController.ts | import { Request, Response } from 'express'; | async create(request: Request, response: Response) {
const { admin_id, text, user_id} = request.body;
const messagesServices = new MessagesService();
const message = await messagesServices.create({
admin_id,
text,
user_id,
});
return response.json(message);
}
// localhost:3333/messages/<user id>
async showByUser(request: Request, response: Response) {
const { id } = request.params;
const messagesServices = new MessagesService();
const list = await messagesServices.listByUser(id);
return response.json(list);
}
}
export { MessagesController }; | import { MessagesService } from "../services/MessagesService";
class MessagesController { |
yellows.rs | //! Tests auto-converted from "sass-spec/spec/core_functions/color/hwb/three_args/w3c/yellows.hrx"
#[allow(unused)]
fn runner() -> crate::TestRunner {
super::runner()
}
#[test]
fn | () {
assert_eq!(
runner().ok("@use \'../test-hue\' as *;\
\n@include test-hue(60);\n"),
"whiteness-0 {\
\n blackness-0: yellow;\
\n blackness-20: #cccc00;\
\n blackness-40: #999900;\
\n blackness-60: #666600;\
\n blackness-80: #333300;\
\n blackness-100: black;\
\n}\
\nwhiteness-20 {\
\n blackness-0: #ffff33;\
\n blackness-20: #cccc33;\
\n blackness-40: #999933;\
\n blackness-60: #666633;\
\n blackness-80: #333333;\
\n blackness-100: #2b2b2b;\
\n}\
\nwhiteness-40 {\
\n blackness-0: #ffff66;\
\n blackness-20: #cccc66;\
\n blackness-40: #999966;\
\n blackness-60: #666666;\
\n blackness-80: #555555;\
\n blackness-100: #494949;\
\n}\
\nwhiteness-60 {\
\n blackness-0: #ffff99;\
\n blackness-20: #cccc99;\
\n blackness-40: #999999;\
\n blackness-60: gray;\
\n blackness-80: #6d6d6d;\
\n blackness-100: #606060;\
\n}\
\nwhiteness-80 {\
\n blackness-0: #ffffcc;\
\n blackness-20: #cccccc;\
\n blackness-40: #aaaaaa;\
\n blackness-60: #929292;\
\n blackness-80: gray;\
\n blackness-100: #717171;\
\n}\
\nwhiteness-100 {\
\n blackness-0: white;\
\n blackness-20: #d5d5d5;\
\n blackness-40: #b6b6b6;\
\n blackness-60: #9f9f9f;\
\n blackness-80: #8e8e8e;\
\n blackness-100: gray;\
\n}\n"
);
}
| test |
priorNet.py | import sys
sys.path.append("../../")
import lib.gcn3d as gcn3d
import torch
import torch.nn as nn
import torch.nn.functional as F
class PriorEncoder(nn.Module):
def __init__(self, support_num: int, neighbor_num: int):
super(PriorEncoder, self).__init__()
self.neighbor_num = neighbor_num
self.conv_0 = gcn3d.Conv_surface(kernel_num=32, support_num=support_num)
self.conv_1 = gcn3d.Conv_layer(32, 64, support_num=support_num)
self.pool_1 = gcn3d.Pool_layer(pooling_rate=4, neighbor_num=4)
self.conv_2 = gcn3d.Conv_layer(64, 128, support_num=support_num)
self.conv_3 = gcn3d.Conv_layer(128, 256, support_num=support_num)
self.pool_2 = gcn3d.Pool_layer(pooling_rate=4, neighbor_num=4)
self.conv_4 = gcn3d.Conv_layer(256, 512, support_num=support_num)
self.pool_3 = gcn3d.Pool_layer(pooling_rate=4, neighbor_num=4)
def forward(self, vertices: "(bs, vertice_num, 3)"):
bs, vertice_num, _ = vertices.size()
neighbor_index = gcn3d.get_neighbor_index(vertices, self.neighbor_num)
fm_0 = self.conv_0(neighbor_index, vertices)
fm_0 = F.relu(fm_0, inplace=True)
fm_1 = self.conv_1(neighbor_index, vertices, fm_0)
fm_1 = F.relu(fm_1, inplace=True)
vertices, fm_1 = self.pool_1(vertices, fm_1)
neighbor_index = gcn3d.get_neighbor_index(vertices, self.neighbor_num)
fm_2 = self.conv_2(neighbor_index, vertices, fm_1)
fm_2 = F.relu(fm_2, inplace=True)
fm_3 = self.conv_3(neighbor_index, vertices, fm_2)
fm_3 = F.relu(fm_3, inplace=True)
vertices, fm_3 = self.pool_2(vertices, fm_3)
neighbor_index = gcn3d.get_neighbor_index(vertices, self.neighbor_num)
fm_4 = self.conv_4(neighbor_index, vertices, fm_3)
feature_global = fm_4.max(1)[0]
# fm_4 = F.relu(fm_4, inplace=True)
# vertices, fm_4 = self.pool_3(vertices, fm_4)
return feature_global
class PriorDecoder(nn.Module):
def __init__(self, emb_dim, n_pts):
|
def forward(self, embedding):
"""
Args:
embedding: (B, 512)
"""
bs = embedding.size()[0]
out1 = F.relu(self.fc1(embedding))
out2 = F.relu(self.fc2(out1))
out3 = self.fc3(out2)
out_pc = out3.view(bs, -1, 3)
return out_pc
class PriorNet(nn.Module):
def __init__(self, emb_dim=512, n_pts=1024):
super(PriorNet, self).__init__()
self.encoder = PriorEncoder(1, 20)
self.decoder = PriorDecoder(emb_dim, n_pts)
def forward(self, in_pc):
emb = self.encoder(in_pc)
out_pc = self.decoder(emb)
return emb, out_pc
if __name__ == '__main__':
estimator = PriorEncoder(1, 1)
xyz = torch.randn(32, 2048, 3)
gg = estimator(xyz) | super(PriorDecoder, self).__init__()
self.fc1 = nn.Linear(emb_dim, 512)
self.fc2 = nn.Linear(512, 1024)
self.fc3 = nn.Linear(1024, 3 * n_pts) |
helpers.go | package pgghelpers
import (
"encoding/json"
"fmt"
"reflect"
"regexp"
"strings"
"sync"
"text/template"
"github.com/Masterminds/sprig"
"github.com/golang/protobuf/proto"
"github.com/golang/protobuf/protoc-gen-go/descriptor"
ggdescriptor "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor"
"github.com/huandu/xstrings"
options "google.golang.org/genproto/googleapis/api/annotations"
)
var jsReservedRe = regexp.MustCompile(`(^|[^A-Za-z])(do|if|in|for|let|new|try|var|case|else|enum|eval|false|null|this|true|void|with|break|catch|class|const|super|throw|while|yield|delete|export|import|public|return|static|switch|typeof|default|extends|finally|package|private|continue|debugger|function|arguments|interface|protected|implements|instanceof)($|[^A-Za-z])`)
var (
registry *ggdescriptor.Registry // some helpers need access to registry
)
var ProtoHelpersFuncMap = template.FuncMap{
"string": func(i interface {
String() string
}) string {
return i.String()
},
"json": func(v interface{}) string {
a, err := json.Marshal(v)
if err != nil {
return err.Error()
}
return string(a)
},
"prettyjson": func(v interface{}) string {
a, err := json.MarshalIndent(v, "", " ")
if err != nil {
return err.Error()
}
return string(a)
},
"splitArray": func(sep string, s string) []interface{} {
var r []interface{}
t := strings.Split(s, sep)
for i := range t {
if t[i] != "" {
r = append(r, t[i])
}
}
return r
},
"first": func(a []string) string {
return a[0]
},
"last": func(a []string) string {
return a[len(a)-1]
},
"concat": func(a string, b ...string) string {
return strings.Join(append([]string{a}, b...), "")
},
"join": func(sep string, a ...string) string {
return strings.Join(a, sep)
},
"upperFirst": func(s string) string {
return strings.ToUpper(s[:1]) + s[1:]
},
"lowerFirst": func(s string) string {
return strings.ToLower(s[:1]) + s[1:]
},
"camelCase": func(s string) string {
if len(s) > 1 {
return xstrings.ToCamelCase(s)
}
return strings.ToUpper(s[:1])
},
"lowerCamelCase": func(s string) string {
if len(s) > 1 {
s = xstrings.ToCamelCase(s)
}
return strings.ToLower(s[:1]) + s[1:]
},
"kebabCase": func(s string) string {
return strings.Replace(xstrings.ToSnakeCase(s), "_", "-", -1)
},
"contains": func(sub, s string) bool {
return strings.Contains(s, sub)
},
"trimstr": func(cutset, s string) string {
return strings.Trim(s, cutset)
},
"index": func(array interface{}, i int32) interface{} {
slice := reflect.ValueOf(array)
if slice.Kind() != reflect.Slice {
panic("Error in index(): given a non-slice type")
}
if i < 0 || int(i) >= slice.Len() {
panic("Error in index(): index out of bounds")
}
return slice.Index(int(i)).Interface()
},
"add": func(a int, b int) int {
return a + b
},
"subtract": func(a int, b int) int {
return a - b
},
"multiply": func(a int, b int) int {
return a * b
},
"divide": func(a int, b int) int {
if b == 0 {
panic("psssst ... little help here ... you cannot divide by 0")
}
return a / b
},
"snakeCase": xstrings.ToSnakeCase,
"getProtoFile": getProtoFile,
"getMessageType": getMessageType,
"getEnumValue": getEnumValue,
"isFieldMessage": isFieldMessage,
"isFieldMessageTimeStamp": isFieldMessageTimeStamp,
"isFieldRepeated": isFieldRepeated,
"haskellType": haskellType,
"goType": goType,
"goZeroValue": goZeroValue,
"goTypeWithPackage": goTypeWithPackage,
"goTypeWithGoPackage": goTypeWithGoPackage,
"jsType": jsType,
"jsSuffixReserved": jsSuffixReservedKeyword,
"namespacedFlowType": namespacedFlowType,
"httpVerb": httpVerb,
"httpPath": httpPath,
"httpPathsAdditionalBindings": httpPathsAdditionalBindings,
"httpBody": httpBody,
"shortType": shortType,
"urlHasVarsFromMessage": urlHasVarsFromMessage,
"lowerGoNormalize": lowerGoNormalize,
"goNormalize": goNormalize,
"leadingComment": leadingComment,
"trailingComment": trailingComment,
"leadingDetachedComments": leadingDetachedComments,
"stringMessageExtension": stringMessageExtension,
"stringFieldExtension": stringFieldExtension,
"int64FieldExtension": int64FieldExtension,
"int64MessageExtension": int64MessageExtension,
"stringMethodOptionsExtension": stringMethodOptionsExtension,
"boolMethodOptionsExtension": boolMethodOptionsExtension,
"boolMessageExtension": boolMessageExtension,
"boolFieldExtension": boolFieldExtension,
"isFieldMap": isFieldMap,
"fieldMapKeyType": fieldMapKeyType,
"fieldMapValueType": fieldMapValueType,
"replaceDict": replaceDict,
"setStore": setStore,
"getStore": getStore,
"goPkg": goPkg,
"goPkgLastElement": goPkgLastElement,
}
var pathMap map[interface{}]*descriptor.SourceCodeInfo_Location
var store = newStore()
// Utility to store some vars across multiple scope
type globalStore struct {
store map[string]interface{}
mu sync.Mutex
}
func newStore() *globalStore {
return &globalStore{
store: make(map[string]interface{}),
}
}
func (s *globalStore) getData(key string) interface{} {
s.mu.Lock()
defer s.mu.Unlock()
if v, ok := s.store[key]; ok {
return v
}
return false
}
func (s *globalStore) setData(key string, o interface{}) {
s.mu.Lock()
s.store[key] = o
s.mu.Unlock()
}
func setStore(key string, o interface{}) string {
store.setData(key, o)
return ""
}
func getStore(key string) interface{} {
return store.getData(key)
}
func SetRegistry(reg *ggdescriptor.Registry) {
registry = reg
}
func InitPathMap(file *descriptor.FileDescriptorProto) {
pathMap = make(map[interface{}]*descriptor.SourceCodeInfo_Location)
addToPathMap(file.GetSourceCodeInfo(), file, []int32{})
}
func InitPathMaps(files []*descriptor.FileDescriptorProto) {
pathMap = make(map[interface{}]*descriptor.SourceCodeInfo_Location)
for _, file := range files {
addToPathMap(file.GetSourceCodeInfo(), file, []int32{})
}
}
// addToPathMap traverses through the AST adding SourceCodeInfo_Location entries to the pathMap.
// Since the AST is a tree, the recursion finishes once it has gone through all the nodes.
func addToPathMap(info *descriptor.SourceCodeInfo, i interface{}, path []int32) {
loc := findLoc(info, path)
if loc != nil {
pathMap[i] = loc
}
switch d := i.(type) {
case *descriptor.FileDescriptorProto:
for index, descriptor := range d.MessageType {
addToPathMap(info, descriptor, newPath(path, 4, index))
}
for index, descriptor := range d.EnumType {
addToPathMap(info, descriptor, newPath(path, 5, index))
}
for index, descriptor := range d.Service {
addToPathMap(info, descriptor, newPath(path, 6, index))
}
case *descriptor.DescriptorProto:
for index, descriptor := range d.Field {
addToPathMap(info, descriptor, newPath(path, 2, index))
}
for index, descriptor := range d.NestedType {
addToPathMap(info, descriptor, newPath(path, 3, index))
}
for index, descriptor := range d.EnumType {
addToPathMap(info, descriptor, newPath(path, 4, index))
}
case *descriptor.EnumDescriptorProto:
for index, descriptor := range d.Value {
addToPathMap(info, descriptor, newPath(path, 2, index))
}
case *descriptor.ServiceDescriptorProto:
for index, descriptor := range d.Method {
addToPathMap(info, descriptor, newPath(path, 2, index))
}
}
}
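// For example, the path {4, 0, 2, 1} refers to the second field (field 2 of
// DescriptorProto is "field", index 1) of the first message in the file
// (field 4 of FileDescriptorProto is "message_type", index 0).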
func newPath(base []int32, field int32, index int) []int32 {
p := append([]int32{}, base...)
p = append(p, field, int32(index))
return p
}
func findLoc(info *descriptor.SourceCodeInfo, path []int32) *descriptor.SourceCodeInfo_Location {
for _, loc := range info.GetLocation() {
if samePath(loc.Path, path) {
return loc
}
}
return nil
}
func samePath(a, b []int32) bool {
if len(a) != len(b) {
return false
}
for i, p := range a {
if p != b[i] {
return false
}
}
return true
}
/*func findSourceInfoLocation(i interface{}) *descriptor.SourceCodeInfo_Location {
if pathMap == nil {
return nil
}
return pathMap[i]
}*/
func leadingComment(i interface{}) string {
loc := pathMap[i]
return loc.GetLeadingComments()
}
func trailingComment(i interface{}) string {
loc := pathMap[i]
return loc.GetTrailingComments()
}
func leadingDetachedComments(i interface{}) []string {
loc := pathMap[i]
return loc.GetLeadingDetachedComments()
}
// stringMethodOptionsExtension extracts method options of a string type.
// To define your own extensions see:
// https://developers.google.com/protocol-buffers/docs/proto#customoptions
// Typically the fieldID of private extensions should be in the range:
// 50000-99999
func stringMethodOptionsExtension(fieldID int32, f *descriptor.MethodDescriptorProto) string {
if f == nil {
return ""
}
if f.Options == nil {
return ""
}
var extendedType *descriptor.MethodOptions
var extensionType *string
eds := proto.RegisteredExtensions(f.Options)
if eds[fieldID] == nil {
ed := &proto.ExtensionDesc{
ExtendedType: extendedType,
ExtensionType: extensionType,
Field: fieldID,
Tag: fmt.Sprintf("bytes,%d", fieldID),
}
proto.RegisterExtension(ed)
eds = proto.RegisteredExtensions(f.Options)
}
ext, err := proto.GetExtension(f.Options, eds[fieldID])
if err != nil {
return ""
}
str, ok := ext.(*string)
if !ok {
return ""
}
return *str
}
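// Template usage sketch (50000 is a hypothetical private extension field ID,
// and .Method is assumed to be a MethodDescriptorProto in template scope):
//   {{stringMethodOptionsExtension 50000 .Method}}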
func stringFieldExtension(fieldID int32, f *descriptor.FieldDescriptorProto) string {
if f == nil {
return ""
}
if f.Options == nil {
return ""
}
var extendedType *descriptor.FieldOptions
var extensionType *string
eds := proto.RegisteredExtensions(f.Options)
if eds[fieldID] == nil {
ed := &proto.ExtensionDesc{
ExtendedType: extendedType,
ExtensionType: extensionType,
Field: fieldID,
Tag: fmt.Sprintf("bytes,%d", fieldID),
}
proto.RegisterExtension(ed)
eds = proto.RegisteredExtensions(f.Options)
}
ext, err := proto.GetExtension(f.Options, eds[fieldID])
if err != nil {
return ""
}
str, ok := ext.(*string)
if !ok {
return ""
}
return *str
}
func int64FieldExtension(fieldID int32, f *descriptor.FieldDescriptorProto) int64 {
if f == nil {
return 0
}
if f.Options == nil {
return 0
}
var extendedType *descriptor.FieldOptions
var extensionType *int64
eds := proto.RegisteredExtensions(f.Options)
if eds[fieldID] == nil {
ed := &proto.ExtensionDesc{
ExtendedType: extendedType,
ExtensionType: extensionType,
Field: fieldID,
Tag: fmt.Sprintf("varint,%d", fieldID),
}
proto.RegisterExtension(ed)
eds = proto.RegisteredExtensions(f.Options)
}
ext, err := proto.GetExtension(f.Options, eds[fieldID])
if err != nil {
return 0
}
i, ok := ext.(*int64)
if !ok {
return 0
}
return *i
}
func int64MessageExtension(fieldID int32, f *descriptor.DescriptorProto) int64 {
if f == nil {
return 0
}
if f.Options == nil {
return 0
}
var extendedType *descriptor.MessageOptions
var extensionType *int64
eds := proto.RegisteredExtensions(f.Options)
if eds[fieldID] == nil {
ed := &proto.ExtensionDesc{
ExtendedType: extendedType,
ExtensionType: extensionType,
Field: fieldID,
Tag: fmt.Sprintf("varint,%d", fieldID),
}
proto.RegisterExtension(ed)
eds = proto.RegisteredExtensions(f.Options)
}
ext, err := proto.GetExtension(f.Options, eds[fieldID])
if err != nil {
return 0
}
i, ok := ext.(*int64)
if !ok {
return 0
}
return *i
}
func stringMessageExtension(fieldID int32, f *descriptor.DescriptorProto) string {
if f == nil {
return ""
}
if f.Options == nil {
return ""
}
var extendedType *descriptor.MessageOptions
var extensionType *string
eds := proto.RegisteredExtensions(f.Options)
if eds[fieldID] == nil {
ed := &proto.ExtensionDesc{
ExtendedType: extendedType,
ExtensionType: extensionType,
Field: fieldID,
Tag: fmt.Sprintf("bytes,%d", fieldID),
}
proto.RegisterExtension(ed)
eds = proto.RegisteredExtensions(f.Options)
}
ext, err := proto.GetExtension(f.Options, eds[fieldID])
if err != nil {
return ""
}
str, ok := ext.(*string)
if !ok {
return ""
}
return *str
}
func boolMethodOptionsExtension(fieldID int32, f *descriptor.MethodDescriptorProto) bool {
if f == nil {
return false
}
if f.Options == nil {
return false
}
var extendedType *descriptor.MethodOptions
var extensionType *bool
eds := proto.RegisteredExtensions(f.Options)
if eds[fieldID] == nil {
ed := &proto.ExtensionDesc{
ExtendedType: extendedType,
ExtensionType: extensionType,
Field: fieldID,
Tag: fmt.Sprintf("varint,%d", fieldID),
}
proto.RegisterExtension(ed)
eds = proto.RegisteredExtensions(f.Options)
}
ext, err := proto.GetExtension(f.Options, eds[fieldID])
if err != nil {
return false
}
b, ok := ext.(*bool)
if !ok {
return false
}
return *b
}
func boolFieldExtension(fieldID int32, f *descriptor.FieldDescriptorProto) bool {
if f == nil {
return false
}
if f.Options == nil {
return false
}
var extendedType *descriptor.FieldOptions
var extensionType *bool
eds := proto.RegisteredExtensions(f.Options)
if eds[fieldID] == nil {
ed := &proto.ExtensionDesc{
ExtendedType: extendedType,
ExtensionType: extensionType,
Field: fieldID,
Tag: fmt.Sprintf("varint,%d", fieldID),
}
proto.RegisterExtension(ed)
eds = proto.RegisteredExtensions(f.Options)
}
ext, err := proto.GetExtension(f.Options, eds[fieldID])
if err != nil {
return false
}
b, ok := ext.(*bool)
if !ok {
return false
}
return *b
}
func boolMessageExtension(fieldID int32, f *descriptor.DescriptorProto) bool {
if f == nil {
return false
}
if f.Options == nil {
return false
}
var extendedType *descriptor.MessageOptions
var extensionType *bool
eds := proto.RegisteredExtensions(f.Options)
if eds[fieldID] == nil {
ed := &proto.ExtensionDesc{
ExtendedType: extendedType,
ExtensionType: extensionType,
Field: fieldID,
Tag: fmt.Sprintf("varint,%d", fieldID),
}
proto.RegisterExtension(ed)
eds = proto.RegisteredExtensions(f.Options)
}
ext, err := proto.GetExtension(f.Options, eds[fieldID])
if err != nil {
return false
}
b, ok := ext.(*bool)
if !ok {
return false
}
return *b
}
func init() {
for k, v := range sprig.TxtFuncMap() {
ProtoHelpersFuncMap[k] = v
}
}
func getProtoFile(name string) *ggdescriptor.File {
if registry == nil {
return nil
}
file, err := registry.LookupFile(name)
if err != nil {
panic(err)
}
return file
}
func getMessageType(f *descriptor.FileDescriptorProto, name string) *ggdescriptor.Message {
if registry != nil {
msg, err := registry.LookupMsg(".", name)
if err != nil {
panic(err)
}
return msg
}
// name is in the form .packageName.MessageTypeName.InnerMessageTypeName...
// e.g. .article.ProductTag
splits := strings.Split(name, ".")
target := splits[len(splits)-1]
for _, m := range f.MessageType {
if target == *m.Name {
return &ggdescriptor.Message{
DescriptorProto: m,
}
}
}
return nil
}
func getEnumValue(f []*descriptor.EnumDescriptorProto, name string) []*descriptor.EnumValueDescriptorProto {
for _, item := range f {
if strings.EqualFold(*item.Name, name) {
return item.GetValue()
}
}
return nil
}
func isFieldMessageTimeStamp(f *descriptor.FieldDescriptorProto) bool {
if f.Type != nil && *f.Type == descriptor.FieldDescriptorProto_TYPE_MESSAGE {
if strings.Compare(*f.TypeName, ".google.protobuf.Timestamp") == 0 {
return true
}
}
return false
}
func isFieldMessage(f *descriptor.FieldDescriptorProto) bool {
if f.Type != nil && *f.Type == descriptor.FieldDescriptorProto_TYPE_MESSAGE {
return true
}
return false
}
func isFieldRepeated(f *descriptor.FieldDescriptorProto) bool {
if f == nil {
return false
}
if f.Type != nil && f.Label != nil && *f.Label == descriptor.FieldDescriptorProto_LABEL_REPEATED {
return true
}
return false
}
func isFieldMap(f *descriptor.FieldDescriptorProto, m *descriptor.DescriptorProto) bool {
if f.TypeName == nil {
return false
}
shortName := shortType(*f.TypeName)
var nt *descriptor.DescriptorProto
for _, t := range m.NestedType {
if *t.Name == shortName {
nt = t
break
}
}
if nt == nil {
return false
}
for _, f := range nt.Field {
switch *f.Name {
case "key":
if *f.Number != 1 {
return false
}
case "value":
if *f.Number != 2 {
return false
}
default:
return false
}
}
return true
}
func fieldMapKeyType(f *descriptor.FieldDescriptorProto, m *descriptor.DescriptorProto) *descriptor.FieldDescriptorProto {
if f.TypeName == nil {
return nil
}
shortName := shortType(*f.TypeName)
var nt *descriptor.DescriptorProto
for _, t := range m.NestedType {
if *t.Name == shortName {
nt = t
break
}
}
if nt == nil {
return nil
}
for _, f := range nt.Field {
if *f.Name == "key" {
return f
}
}
return nil
}
func fieldMapValueType(f *descriptor.FieldDescriptorProto, m *descriptor.DescriptorProto) *descriptor.FieldDescriptorProto {
if f.TypeName == nil {
return nil
}
shortName := shortType(*f.TypeName)
var nt *descriptor.DescriptorProto
for _, t := range m.NestedType {
if *t.Name == shortName {
nt = t
break
}
}
if nt == nil {
return nil
}
for _, f := range nt.Field {
if *f.Name == "value" {
return f
}
}
return nil
}
// goTypeWithGoPackage types MESSAGE and ENUM fields with the go_package name.
// This method is an evolution of goTypeWithPackage. It also handles embedded messages.
//
// example:
// ```proto
// message GetArticleResponse {
// Article article = 1;
// message Storage {
// string code = 1;
// }
// repeated Storage storages = 2;
// }
// ```
// Then the Go type name for `storages` is `GetArticleResponse_Storage`.
//
func goTypeWithGoPackage(p *descriptor.FileDescriptorProto, f *descriptor.FieldDescriptorProto) string {
pkg := ""
if *f.Type == descriptor.FieldDescriptorProto_TYPE_MESSAGE || *f.Type == descriptor.FieldDescriptorProto_TYPE_ENUM {
if isTimestampPackage(*f.TypeName) {
pkg = "timestamp"
} else {
pkg = *p.GetOptions().GoPackage
if strings.Contains(*p.GetOptions().GoPackage, ";") {
pkg = strings.Split(*p.GetOptions().GoPackage, ";")[1]
}
}
}
return goTypeWithEmbedded(pkg, f, p)
}
// Warning: does not handle embedded messages like goTypeWithGoPackage does.
func goTypeWithPackage(f *descriptor.FieldDescriptorProto) string {
pkg := ""
if *f.Type == descriptor.FieldDescriptorProto_TYPE_MESSAGE || *f.Type == descriptor.FieldDescriptorProto_TYPE_ENUM {
if isTimestampPackage(*f.TypeName) {
pkg = "timestamp"
} else {
pkg = getPackageTypeName(*f.TypeName)
}
}
return goType(pkg, f)
}
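// haskellType maps a protobuf field type to its Haskell equivalent; repeated
// fields map to list types.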
func haskellType(pkg string, f *descriptor.FieldDescriptorProto) string {
switch *f.Type {
case descriptor.FieldDescriptorProto_TYPE_DOUBLE:
if *f.Label == descriptor.FieldDescriptorProto_LABEL_REPEATED {
return "[Float]"
}
return "Float"
case descriptor.FieldDescriptorProto_TYPE_FLOAT:
if *f.Label == descriptor.FieldDescriptorProto_LABEL_REPEATED {
return "[Float]"
}
return "Float"
case descriptor.FieldDescriptorProto_TYPE_INT64:
if *f.Label == descriptor.FieldDescriptorProto_LABEL_REPEATED {
return "[Int64]"
}
return "Int64"
case descriptor.FieldDescriptorProto_TYPE_UINT64:
if *f.Label == descriptor.FieldDescriptorProto_LABEL_REPEATED {
return "[Word]"
}
return "Word"
case descriptor.FieldDescriptorProto_TYPE_INT32:
if *f.Label == descriptor.FieldDescriptorProto_LABEL_REPEATED {
return "[Int]"
}
return "Int"
case descriptor.FieldDescriptorProto_TYPE_UINT32:
if *f.Label == descriptor.FieldDescriptorProto_LABEL_REPEATED {
return "[Word]"
}
return "Word"
case descriptor.FieldDescriptorProto_TYPE_BOOL:
if *f.Label == descriptor.FieldDescriptorProto_LABEL_REPEATED {
return "[Bool]"
}
return "Bool"
case descriptor.FieldDescriptorProto_TYPE_STRING:
if *f.Label == descriptor.FieldDescriptorProto_LABEL_REPEATED {
return "[Text]"
}
return "Text"
case descriptor.FieldDescriptorProto_TYPE_MESSAGE:
if pkg != "" {
pkg = pkg + "."
}
if *f.Label == descriptor.FieldDescriptorProto_LABEL_REPEATED {
return fmt.Sprintf("[%s%s]", pkg, shortType(*f.TypeName))
}
return fmt.Sprintf("%s%s", pkg, shortType(*f.TypeName))
case descriptor.FieldDescriptorProto_TYPE_BYTES:
if *f.Label == descriptor.FieldDescriptorProto_LABEL_REPEATED {
return "[Word8]"
}
return "Word8"
case descriptor.FieldDescriptorProto_TYPE_ENUM:
return fmt.Sprintf("%s%s", pkg, shortType(*f.TypeName))
default:
return "Generic"
}
}
func goTypeWithEmbedded(pkg string, f *descriptor.FieldDescriptorProto, p *descriptor.FileDescriptorProto) string {
if pkg != "" {
pkg = pkg + "."
}
switch *f.Type {
case descriptor.FieldDescriptorProto_TYPE_DOUBLE:
if *f.Label == descriptor.FieldDescriptorProto_LABEL_REPEATED {
return "[]float64"
}
return "float64"
case descriptor.FieldDescriptorProto_TYPE_FLOAT:
if *f.Label == descriptor.FieldDescriptorProto_LABEL_REPEATED {
return "[]float32"
}
return "float32"
case descriptor.FieldDescriptorProto_TYPE_INT64:
if *f.Label == descriptor.FieldDescriptorProto_LABEL_REPEATED {
return "[]int64"
}
return "int64"
case descriptor.FieldDescriptorProto_TYPE_UINT64:
if *f.Label == descriptor.FieldDescriptorProto_LABEL_REPEATED {
return "[]uint64"
}
return "uint64"
case descriptor.FieldDescriptorProto_TYPE_INT32:
if *f.Label == descriptor.FieldDescriptorProto_LABEL_REPEATED {
return "[]int32"
}
return "int32"
case descriptor.FieldDescriptorProto_TYPE_UINT32:
if *f.Label == descriptor.FieldDescriptorProto_LABEL_REPEATED {
return "[]uint32"
}
return "uint32"
case descriptor.FieldDescriptorProto_TYPE_BOOL:
if *f.Label == descriptor.FieldDescriptorProto_LABEL_REPEATED {
return "[]bool"
}
return "bool"
case descriptor.FieldDescriptorProto_TYPE_STRING:
if *f.Label == descriptor.FieldDescriptorProto_LABEL_REPEATED {
return "[]string"
}
return "string"
case descriptor.FieldDescriptorProto_TYPE_MESSAGE:
name := *f.TypeName
if *f.Label == descriptor.FieldDescriptorProto_LABEL_REPEATED {
fieldPackage := strings.Split(*f.TypeName, ".")
filePackage := strings.Split(*p.Package, ".")
// check if we are working with an embedded message.
if len(fieldPackage) > 1 && len(fieldPackage)+1 > len(filePackage)+1 {
name = strings.Join(fieldPackage[len(filePackage)+1:], "_")
}
return fmt.Sprintf("[]*%s%s", pkg, shortType(name))
}
return fmt.Sprintf("*%s%s", pkg, shortType(name))
case descriptor.FieldDescriptorProto_TYPE_BYTES:
if *f.Label == descriptor.FieldDescriptorProto_LABEL_REPEATED {
return "[]byte"
}
return "byte"
case descriptor.FieldDescriptorProto_TYPE_ENUM:
name := *f.TypeName
fieldPackage := strings.Split(*f.TypeName, ".")
filePackage := strings.Split(*p.Package, ".")
// check if we are working with an embedded message.
if len(fieldPackage) > 1 && len(fieldPackage)+1 > len(filePackage)+1 {
name = strings.Join(fieldPackage[len(filePackage)+1:], "_")
}
return fmt.Sprintf("*%s%s", pkg, shortType(name))
default:
return "interface{}"
}
}
// Deprecated: use goTypeWithEmbedded instead.
func goType(pkg string, f *descriptor.FieldDescriptorProto) string {
if pkg != "" {
pkg = pkg + "."
}
switch *f.Type {
case descriptor.FieldDescriptorProto_TYPE_DOUBLE:
if *f.Label == descriptor.FieldDescriptorProto_LABEL_REPEATED {
return "[]float64"
}
return "float64"
case descriptor.FieldDescriptorProto_TYPE_FLOAT:
if *f.Label == descriptor.FieldDescriptorProto_LABEL_REPEATED {
return "[]float32"
}
return "float32"
case descriptor.FieldDescriptorProto_TYPE_INT64:
if *f.Label == descriptor.FieldDescriptorProto_LABEL_REPEATED {
return "[]int64"
}
return "int64"
case descriptor.FieldDescriptorProto_TYPE_UINT64:
if *f.Label == descriptor.FieldDescriptorProto_LABEL_REPEATED {
return "[]uint64"
}
return "uint64"
case descriptor.FieldDescriptorProto_TYPE_INT32:
if *f.Label == descriptor.FieldDescriptorProto_LABEL_REPEATED {
return "[]int32"
}
return "int32"
case descriptor.FieldDescriptorProto_TYPE_UINT32:
if *f.Label == descriptor.FieldDescriptorProto_LABEL_REPEATED {
return "[]uint32"
}
return "uint32"
case descriptor.FieldDescriptorProto_TYPE_BOOL:
if *f.Label == descriptor.FieldDescriptorProto_LABEL_REPEATED {
return "[]bool"
}
return "bool"
case descriptor.FieldDescriptorProto_TYPE_STRING:
if *f.Label == descriptor.FieldDescriptorProto_LABEL_REPEATED {
return "[]string"
}
return "string"
case descriptor.FieldDescriptorProto_TYPE_MESSAGE:
if *f.Label == descriptor.FieldDescriptorProto_LABEL_REPEATED {
return fmt.Sprintf("[]*%s%s", pkg, shortType(*f.TypeName))
}
return fmt.Sprintf("*%s%s", pkg, shortType(*f.TypeName))
case descriptor.FieldDescriptorProto_TYPE_BYTES:
if *f.Label == descriptor.FieldDescriptorProto_LABEL_REPEATED {
return "[]byte"
}
return "byte"
case descriptor.FieldDescriptorProto_TYPE_ENUM:
return fmt.Sprintf("*%s%s", pkg, shortType(*f.TypeName))
default:
return "interface{}"
}
}
func goZeroValue(f *descriptor.FieldDescriptorProto) string {
const nilString = "nil"
if *f.Label == descriptor.FieldDescriptorProto_LABEL_REPEATED {
return nilString
}
switch *f.Type {
case descriptor.FieldDescriptorProto_TYPE_DOUBLE:
return "0.0"
case descriptor.FieldDescriptorProto_TYPE_FLOAT:
return "0.0"
case descriptor.FieldDescriptorProto_TYPE_INT64:
return "0"
case descriptor.FieldDescriptorProto_TYPE_UINT64:
return "0"
case descriptor.FieldDescriptorProto_TYPE_INT32:
return "0"
case descriptor.FieldDescriptorProto_TYPE_UINT32:
return "0"
case descriptor.FieldDescriptorProto_TYPE_BOOL:
return "false"
case descriptor.FieldDescriptorProto_TYPE_STRING:
return "\"\""
case descriptor.FieldDescriptorProto_TYPE_MESSAGE:
return nilString
case descriptor.FieldDescriptorProto_TYPE_BYTES:
return "0"
case descriptor.FieldDescriptorProto_TYPE_ENUM:
return nilString
default:
return nilString
}
}
func jsType(f *descriptor.FieldDescriptorProto) string {
template := "%s"
if isFieldRepeated(f) {
template = "Array<%s>"
}
switch *f.Type {
case descriptor.FieldDescriptorProto_TYPE_MESSAGE,
descriptor.FieldDescriptorProto_TYPE_ENUM:
return fmt.Sprintf(template, namespacedFlowType(*f.TypeName))
case descriptor.FieldDescriptorProto_TYPE_DOUBLE,
descriptor.FieldDescriptorProto_TYPE_FLOAT,
descriptor.FieldDescriptorProto_TYPE_INT64,
descriptor.FieldDescriptorProto_TYPE_UINT64,
descriptor.FieldDescriptorProto_TYPE_INT32,
descriptor.FieldDescriptorProto_TYPE_FIXED64,
descriptor.FieldDescriptorProto_TYPE_FIXED32,
descriptor.FieldDescriptorProto_TYPE_UINT32,
descriptor.FieldDescriptorProto_TYPE_SFIXED32,
descriptor.FieldDescriptorProto_TYPE_SFIXED64,
descriptor.FieldDescriptorProto_TYPE_SINT32,
descriptor.FieldDescriptorProto_TYPE_SINT64:
return fmt.Sprintf(template, "number")
case descriptor.FieldDescriptorProto_TYPE_BOOL:
return fmt.Sprintf(template, "boolean")
case descriptor.FieldDescriptorProto_TYPE_BYTES:
return fmt.Sprintf(template, "Uint8Array")
case descriptor.FieldDescriptorProto_TYPE_STRING:
return fmt.Sprintf(template, "string")
default:
return fmt.Sprintf(template, "any")
}
}
func jsSuffixReservedKeyword(s string) string {
return jsReservedRe.ReplaceAllString(s, "${1}${2}_${3}")
}
func isTimestampPackage(s string) bool {
var isTimestampPackage bool
if strings.Compare(s, ".google.protobuf.Timestamp") == 0 {
isTimestampPackage = true
}
return isTimestampPackage
}
func getPackageTypeName(s string) string {
if strings.Contains(s, ".") {
return strings.Split(s, ".")[1]
}
return ""
}
func shortType(s string) string {
t := strings.Split(s, ".")
return t[len(t)-1]
}
func namespacedFlowType(s string) string {
trimmed := strings.TrimLeft(s, ".")
splitted := strings.Split(trimmed, ".")
return strings.Join(splitted, "$")
}
func httpPath(m *descriptor.MethodDescriptorProto) string {
ext, err := proto.GetExtension(m.Options, options.E_Http)
if err != nil {
return err.Error()
}
opts, ok := ext.(*options.HttpRule)
if !ok {
return fmt.Sprintf("extension is %T; want an HttpRule", ext)
}
switch t := opts.Pattern.(type) {
default:
return ""
case *options.HttpRule_Get:
return t.Get
case *options.HttpRule_Post:
return t.Post
case *options.HttpRule_Put:
return t.Put
case *options.HttpRule_Delete:
return t.Delete
case *options.HttpRule_Patch:
return t.Patch
case *options.HttpRule_Custom:
return t.Custom.Path
}
}
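// httpPathsAdditionalBindings returns the paths of all additional_bindings
// declared on the method's (google.api.http) option.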
func httpPathsAdditionalBindings(m *descriptor.MethodDescriptorProto) []string {
ext, err := proto.GetExtension(m.Options, options.E_Http)
if err != nil {
panic(err.Error())
}
opts, ok := ext.(*options.HttpRule)
if !ok {
panic(fmt.Sprintf("extension is %T; want an HttpRule", ext))
}
var httpPaths []string
var optsAdditionalBindings = opts.GetAdditionalBindings()
for _, optAdditionalBindings := range optsAdditionalBindings {
switch t := optAdditionalBindings.Pattern.(type) {
case *options.HttpRule_Get:
httpPaths = append(httpPaths, t.Get)
case *options.HttpRule_Post:
httpPaths = append(httpPaths, t.Post)
case *options.HttpRule_Put:
httpPaths = append(httpPaths, t.Put)
case *options.HttpRule_Delete:
httpPaths = append(httpPaths, t.Delete)
case *options.HttpRule_Patch:
httpPaths = append(httpPaths, t.Patch)
case *options.HttpRule_Custom:
httpPaths = append(httpPaths, t.Custom.Path)
default:
// nothing
}
}
return httpPaths
}
func | (m *descriptor.MethodDescriptorProto) string {
ext, err := proto.GetExtension(m.Options, options.E_Http)
if err != nil {
return err.Error()
}
opts, ok := ext.(*options.HttpRule)
if !ok {
return fmt.Sprintf("extension is %T; want an HttpRule", ext)
}
switch t := opts.Pattern.(type) {
default:
return ""
case *options.HttpRule_Get:
return "GET"
case *options.HttpRule_Post:
return "POST"
case *options.HttpRule_Put:
return "PUT"
case *options.HttpRule_Delete:
return "DELETE"
case *options.HttpRule_Patch:
return "PATCH"
case *options.HttpRule_Custom:
return t.Custom.Kind
}
}
func httpBody(m *descriptor.MethodDescriptorProto) string {
ext, err := proto.GetExtension(m.Options, options.E_Http)
if err != nil {
return err.Error()
}
opts, ok := ext.(*options.HttpRule)
if !ok {
return fmt.Sprintf("extension is %T; want an HttpRule", ext)
}
return opts.Body
}
func urlHasVarsFromMessage(path string, d *ggdescriptor.Message) bool {
for _, field := range d.Field {
if !isFieldMessage(field) {
if strings.Contains(path, fmt.Sprintf("{%s}", *field.Name)) {
return true
}
}
}
return false
}
// lowerGoNormalize takes a string and applies formatting
// rules to conform to Golang convention. It applies a camel
// case filter, lowers the first character and formats fields
// with `id` to `ID`.
func lowerGoNormalize(s string) string {
fmtd := xstrings.ToCamelCase(s)
fmtd = xstrings.FirstRuneToLower(fmtd)
return formatID(s, fmtd)
}
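// For example, lowerGoNormalize("user_id") is expected to yield "userID".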
// goNormalize takes a string and applies formatting rules
// to conform to Golang convention. It applies a camel case
// filter and formats fields with `id` to `ID`.
func goNormalize(s string) string {
fmtd := xstrings.ToCamelCase(s)
return formatID(s, fmtd)
}
// formatID takes a base string alongside a formatted string.
// It acts as a transformation filter for fields containing
// `id` in order to conform to Golang convention.
func formatID(base string, formatted string) string {
if formatted == "" {
return formatted
}
switch {
case base == "id":
// id -> ID
return "ID"
case strings.HasPrefix(base, "id_"):
// id_some -> IDSome
return "ID" + formatted[2:]
case strings.HasSuffix(base, "_id"):
// some_id -> SomeID
return formatted[:len(formatted)-2] + "ID"
case strings.HasSuffix(base, "_ids"):
// some_ids -> SomeIDs
return formatted[:len(formatted)-3] + "IDs"
}
return formatted
}
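// replaceDict replaces, in src, every occurrence of each key of dict with its
// value; entries whose value is not a string are skipped.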
func replaceDict(src string, dict map[string]interface{}) string {
for old, v := range dict {
		repl, ok := v.(string)
		if !ok {
			continue
		}
		src = strings.Replace(src, old, repl, -1)
}
return src
}
func goPkg(f *descriptor.FileDescriptorProto) string {
return f.Options.GetGoPackage()
}
func goPkgLastElement(f *descriptor.FileDescriptorProto) string {
pkg := goPkg(f)
pkgSplitted := strings.Split(pkg, "/")
return pkgSplitted[len(pkgSplitted)-1]
}
| httpVerb |
random_forest.py | import os
import numpy as np
import pandas as pd
from sklearn.externals import joblib
from sklearn.linear_model import LogisticRegression
import os
import sys
print os.getcwd()
sys.path.append(os.getcwd()+"/SemanticLabelingAlgorithm/semantic_labeling/main")
sys.path.append(os.getcwd()+"/..")
from lib import searcher #change
from lib.utils import is_tree_based
from tests.integrated import feature_list, tree_feature_list
class MyRandomForest:
def __init__(self, data_sets=None, dataset_map=None, model_path=None):
self.data_sets = data_sets
self.dataset_map = dataset_map
self.model_path = model_path
self.model = None
self.feature_selector = None
def generate_train_data(self, train_sizes):
train_data = []
for data_set in self.data_sets:
            # note: do not reset train_data here; accumulate across data sets
index_config = {'name': data_set}
source_map = self.dataset_map[data_set]
double_name_list = source_map.values() * 2
for size in train_sizes:
for idx, source_name in enumerate(source_map.keys()):
train_names = [source.index_name for source in double_name_list[idx + 1: idx + size + 1]]
print("tain names", train_names)
print("i config", index_config )
train_examples_map = searcher.search_types_data(index_config, train_names)
source = source_map[source_name]
for column in source.column_map.values():
if column.semantic_type:
textual_train_map = searcher.search_similar_text_data(index_config,
column.value_text,
train_names)
feature_vectors = column.generate_candidate_types(train_examples_map, textual_train_map,
is_labeled=True)
train_data += feature_vectors
return train_data
def train(self, train_sizes):
if os.path.exists(self.model_path):
print "Loading ..."
self.model = joblib.load(self.model_path)
else:
train_df = self.generate_train_data(train_sizes)
train_df = pd.DataFrame(train_df)
train_df = train_df.replace([np.inf, -np.inf, np.nan], 0)
# self.model = LogisticRegression(n_estimators=200, combination="majority_voting")
self.model = LogisticRegression(class_weight="balanced")
# print train_df
# sample_weight = train_df['label'].apply(lambda x: 15 if x else 1)
# print sample_weight
if is_tree_based:
self.model.fit(train_df[tree_feature_list], train_df['label'])
else:
# self.model.fit(train_df[feature_list], train_df['label'])
self.model.fit(train_df[feature_list], train_df['label'])
# train_df[feature_list + ["label"]].to_csv("train.csv", mode='w', header=True)
# cost = len(train_df[train_df['label'] == False]) / len(train_df[train_df['label'] == True])
# self.model.fit(train_df[feature_list].as_matrix(), train_df['label'].as_matrix(),
# np.tile(np.array([1, cost, 0, 0]), (train_df.shape[0], 1)))
joblib.dump(self.model, self.model_path)
def predict(self, test_data, true_type):
test_df = pd.DataFrame(test_data)
test_df = test_df.replace([np.inf, -np.inf, np.nan], 0)
if is_tree_based:
|
else:
test_df['prob'] = [x[1] for x in self.model.predict_proba(test_df[feature_list].as_matrix())]
# test_df['prediction'] = [1 if x else 0 for x in self.model.predict(test_df[feature_list])]
test_df['truth'] = test_df['name'].map(lambda row: row.split("!")[0] == true_type)
test_df = test_df.sort_values(by=["prob"], ascending=[False]).head(4)
if os.path.exists("debug.csv"):
test_df.to_csv("debug.csv", mode='a', header=False)
else:
test_df.to_csv("debug.csv", mode='w', header=True)
return test_df[["prob", 'name']].T.to_dict().values()
| test_df['prob'] = [x[1] for x in self.model.predict_proba(test_df[tree_feature_list].as_matrix())] |
query.go | package types
import (
"github.com/gogo/protobuf/grpc"
client "github.com/cosmos/ibc-go/modules/core/02-client"
clienttypes "github.com/cosmos/ibc-go/modules/core/02-client/types"
connection "github.com/cosmos/ibc-go/modules/core/03-connection"
connectiontypes "github.com/cosmos/ibc-go/modules/core/03-connection/types"
channel "github.com/cosmos/ibc-go/modules/core/04-channel"
channeltypes "github.com/cosmos/ibc-go/modules/core/04-channel/types"
port "github.com/cosmos/ibc-go/modules/core/05-port"
porttypes "github.com/cosmos/ibc-go/modules/core/05-port/types"
)
// QueryServer defines the IBC interfaces that the gRPC query server must implement
type QueryServer interface {
clienttypes.QueryServer
connectiontypes.QueryServer
channeltypes.QueryServer
porttypes.QueryServer
}
// RegisterQueryService registers each individual IBC submodule query service
func RegisterQueryService(server grpc.Server, queryService QueryServer) | {
client.RegisterQueryService(server, queryService)
connection.RegisterQueryService(server, queryService)
channel.RegisterQueryService(server, queryService)
port.RegisterQueryService(server, queryService)
} |
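// Wiring sketch (hypothetical app wiring; names are illustrative):
//   types.RegisterQueryService(app.GRPCQueryRouter(), app.IBCKeeper)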
|
utilsLinReg.py | import sklearn.metrics as metrics
import pandas as pd
import numpy as np
def repair_chrdata(df, tCol):
### Parameters:
# df: input dataframe
# tCol: targeted column label with NaN
### Output
# df: repaired dataframe
# word: string of related dataframe column with some records have NaN in targeted column
# count: number of records fixed in the targeted column with NaN
    # work out the number of NaN records that need fixing
dFrm = df[df[tCol].isnull()]
count = len(dFrm)
# work out the fill up string (most appearance) at targeted column for NULL
tword = df[tCol].unique().tolist()
# print(tword)
wordLT = df[tCol].value_counts(dropna=False)
word = ''
wordCnt = 0
for index, value in wordLT.items():
print(f'[COUNT] Index: {index}, Value: {value}')
if wordCnt < value:
word = index
wordCnt = value
# print(word)
# print(wordLT)
# update the targeted NaN with the most frequent string
mask = df[tCol].isnull()
df.loc[mask, tCol] = word
print(f'[REPAIR] "{tCol}" with string: {word}, Count: {count}')
return df, word, count
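# Usage sketch ('Embarked' is a hypothetical string column containing NaN):
#   df, word, count = repair_chrdata(df, 'Embarked')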
# Repair a single numeric data column containing NaN with the median value
def repair_numdata(df, tCol):
### Parameters:
# df: input dataframe
# tCol: targeted column label with NaN
### Output
# df: repaired dataframe
# medianVal: median value of related dataframe column with some records have NaN in targeted column
# count: number of records fixed in the targeted column with NaN
    # work out the number of NaN records that need fixing
dFrm = df[df[tCol].isnull()]
count = len(dFrm)
# work out the median value of the records from targeted column
medianVal = df[tCol].median() | return df, medianVal, count
### Work out the educated-guess targets to repair a dataframe with NaN in the 'repair_rcdata' function
def repair_target(df, tCol, rCol):
### Parameters:
# df: input dataframe
# tCol: targeted column label with NaN
# rCol: related column label without NaN for educated guess
### Output
# target: column value of related column that have NaN in targeted column
repair = df[df[tCol].isnull()]
# print(repair[[rCol, tCol]])
target = sorted(repair[rCol].unique().tolist())
print(f'[TARGET] {tCol} NaN target: {target}')
return target
### Educated guess to repair a dataframe column containing NaN with the mean value of a related
### dataframe column
def repair_rcdata(df, tCol, rCol, target):
### Parameters:
# df: input dataframe
# tCol: targeted column label with NaN
# rCol: related column label without NaN for educated guess
# target: column value of related column that have NaN in targeted column
### Output
# df: repaired dataframe
# meanVal: mean value of related dataframe column with some records have NaN in targeted column
# count: number of records fixed in the targeted column with NaN
### Main coding
    # work out the number of NaN records that need fixing
dFrm = df[df[tCol].isnull()]
dFrm = dFrm[dFrm[rCol] == target]
count = len(dFrm)
# work out the mean value of the records from related column
repair = df.loc[df[rCol] == target]
meanVal = round(repair[tCol].mean(), 3)
if np.isnan(meanVal):
meanVal = np.float64(0)
# update the targeted NaN with the calculated mean value of related records
df[tCol] = df.apply(
lambda row: meanVal if np.isnan(row[tCol]) & (row[rCol] == target)
else row[tCol], axis=1
)
print(f'[REPAIR] {tCol}({target}) Mean: {meanVal}, Count: {count}')
return df, meanVal, count
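# Usage sketch ('Age' and 'Pclass' are hypothetical column names):
#   for t in repair_target(df, 'Age', 'Pclass'):
#       df, meanVal, count = repair_rcdata(df, 'Age', 'Pclass', t)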
def regression_results(y_true, y_pred):
# Regression metrics
explained_variance=metrics.explained_variance_score(y_true, y_pred)
mean_absolute_error=metrics.mean_absolute_error(y_true, y_pred)
mse=metrics.mean_squared_error(y_true, y_pred)
# mean_squared_log_error=metrics.mean_squared_log_error(y_true, y_pred)
# median_absolute_error=metrics.median_absolute_error(y_true, y_pred)
r2=metrics.r2_score(y_true, y_pred)
print('explained_variance: ', round(explained_variance,4))
# print('mean_squared_log_error: ', round(mean_squared_log_error,4))
print('r-squared (r2): ', round(r2,4))
print('mean_absolute_error (MAE): ', round(mean_absolute_error,4))
print('mean_squared_error (MSE): ', round(mse,4))
print('root_mean_squared_error (RMSE): ', round(np.sqrt(mse),4)) | # update the targeted NaN with the median value
mask = df[tCol].isnull()
df.loc[mask, tCol] = medianVal
print(f'[REPAIR] "{tCol}" Median: {medianVal}, Count: {count}') |
GAN.py | # ---
# jupyter:
# jupytext:
# text_representation:
# extension: .py
# format_name: percent
# format_version: '1.5'
# jupytext_version: 1.7.1
# kernelspec:
# display_name: Python 3
# name: python3
# ---
# %% [markdown]
# ```{note}
# If running in Colab, think of changing the runtime type before starting, in
# order to have access to GPU ressources: Runtime->Change Runtime Type, then
# chose GPU for hardware accelerator.
# ```
#
# %% [markdown]
# In this lab we will focus on **image synthesis**, in particular on
# synthesizing **T2-weighted MRI** from **T1-weighted MRI**.
#
# We will investigate three approaches to do so:
#
# 1. First, we will train a generator (or encoder-decoder).
# 2. Then, we will train a conditional generative adversarial network (cGAN).
# 3. Finally, we will train a cycle generative adversarial network (CycleGAN).
#
# We will evaluate the quality of the generated images using several metrics.
#
# We will use the [IXI dataset](https://brain-development.org/ixi-dataset/)
# to have access to **paired T1-w and T2-w images**.
# Before creating and training the different neural networks,
# we will:
#
# 1. fetch the dataset,
# 2. have a look at it to see what the task looks like, and
# 3. illustrate how to easily access the data.
# %% [markdown]
# # 0. Fetching the dataset
#
# The dataset can be found on this
# [server](https://aramislab.paris.inria.fr/files/data/databases/DL4MI/IXI-dataset.tar.gz)
# and alternatively in the following
# [GitHub repository](https://github.com/Easternwen/IXI-dataset).
# In the `size64` folder, there are 1154 files: 2 images for 577 subjects.
# The size of each image is (64, 64).
#
# Let's download the file and have a look at the data.
# %%
# Get the dataset from the server
! git clone https://github.com/Easternwen/IXI-dataset.git
# %% [markdown]
# The dataset used in this lab is composed of preprocessed images from the
# [IXI dataset](https://brain-development.org/ixi-dataset/). Two different
# structural MRI modalities are comprised in this dataset:
#
# - T1 weighted images
#
# - T2 weighted images
#
# These modalities do not highlight the same tissues: for example, CSF
# voxels appear dark in T1-weighted imaging whereas they are highlighted
# in T2-weighted imaging.
# %%
import matplotlib.pyplot as plt
import numpy as np
import os
import torch
root = "./IXI-dataset/size64/"
plt.figure(figsize=(9, 4))
plt.subplot(1, 2, 1)
plt.imshow(np.swapaxes(torch.load(os.path.join(root, 'sub-IXI002 - T1.pt')), 0, 1),
cmap='gray', origin='lower')
plt.title("T1 slice for subject 002")
plt.axis('off')
plt.subplot(1, 2, 2)
plt.imshow(np.swapaxes(torch.load(os.path.join(root, 'sub-IXI002 - T2.pt')), 0, 1),
cmap='gray', origin='lower')
plt.title("T2 slice for subject 002")
plt.axis('off')
plt.show()
# %% [markdown]
# Let's import all the necessary packages.
# %%
# torch stuff
import torch
from torch import nn
from torch.utils.data import DataLoader
import torch.nn.functional as F
# torchsummary and torchvision
from torchsummary import summary
from torchvision.utils import save_image
# matplotlib stuff
import matplotlib.pyplot as plt
import matplotlib.image as img
# numpy and pandas
import numpy as np
import pandas as pd
# Common python packages
import datetime
import os
import sys
import time
# %% [markdown]
# Let's create a custom `IXIDataset` class to easily have access to the data.
# Here we don't use tsv files to split subjects between the training and the
# test set. We only set the dataset to the `train` or `test` mode to access
# training or test data.
# %%
class IXIDataset(torch.utils.data.Dataset):
"""Dataset utility class.
Args:
root: (str) Path of the folder with all the images.
mode : {'train' or 'test'} Part of the dataset that is loaded.
"""
def __init__(self, root, mode="train"):
files = sorted(os.listdir(root))
patient_id = list(set([i.split()[0] for i in files]))
imgs = []
if mode == "train":
for i in patient_id[:int(0.8 * len(patient_id))]:
if (
os.path.isfile(os.path.join(root, i + " - T1.pt")) and
os.path.isfile(os.path.join(root, i + " - T2.pt"))
):
imgs.append((os.path.join(root, i + " - T1.pt"),
os.path.join(root, i + " - T2.pt")))
elif mode == "test":
for i in patient_id[int(0.8 * len(patient_id)):]:
if (
os.path.isfile(os.path.join(root, i + " - T1.pt")) and
os.path.isfile(os.path.join(root, i + " - T2.pt"))
):
imgs.append((os.path.join(root, i + " - T1.pt"),
os.path.join(root, i + " - T2.pt")))
self.imgs = imgs
def __getitem__(self, index):
t1_path, t2_path = self.imgs[index]
t1 = torch.load(t1_path)[None, :, :]
t2 = torch.load(t2_path)[None, :, :]
return {"T1": t1, "T2": t2}
def __len__(self):
return len(self.imgs)
# %% [markdown]
# Using this class and the `DataLoader` class from `torch.utils.data`, you can
# easily access your dataset. Here is a quick example of how to use it:
#
# ```python
# # Create a DataLoader instance for the training set
# # You will get a batch of samples from the training set
# dataloader = DataLoader(
# IXIDataset(root, mode="train"),
# batch_size=1,
# shuffle=False,
# )
#
# for batch in dataloader:
# # batch is a dictionary with two keys:
# # - batch["T1"] is a tensor with shape (batch_size, 64, 64) with the T1 images for the samples in this batch
# # - batch["T2"] is a tensor with shape (batch_size, 64, 64) with the T2 images for the samples in this batch
# ```
# %% [markdown]
# # 1. Generator
#
# ## 1.1 Architecture
#
# The generator will have a **U-Net architecture** with the following
# characteristics:
#
# * the descending blocks are convolutional layers followed by instance
# normalization with a LeakyReLU activation function;
#
# * the ascending blocks are transposed convolutional layers followed by
# instance normalization with a ReLU activation function.
#
# The parameters for each layer are given in the picture below.
# %% [markdown]
# <a href="https://ibb.co/QXBDNy3">
# <img src="https://i.ibb.co/g614TkL/Capture-d-cran-2020-03-02-16-04-06.png" width="800" border="0">
# </a>
# %% [markdown]
#
# <div class="alert alert-block alert-info">
# <b>Exercise</b>: Create a <code>GeneratorUNet</code> class to define the
# generator with the architecture given above.
# </div>
# %%
# We provide classes for each block of the U-Net.
class UNetDown(nn.Module):
"""Descending block of the U-Net.
Args:
in_size: (int) number of channels in the input image.
out_size : (int) number of channels in the output image.
"""
def __init__(self, in_size, out_size):
super(UNetDown, self).__init__()
self.model = nn.Sequential(
nn.Conv2d(in_size, out_size, kernel_size=3, stride=2, padding=1),
nn.InstanceNorm2d(out_size),
nn.LeakyReLU(0.2)
)
def forward(self, x):
return self.model(x)
class UNetUp(nn.Module):
"""Ascending block of the U-Net.
Args:
in_size: (int) number of channels in the input image.
out_size : (int) number of channels in the output image.
"""
def __init__(self, in_size, out_size):
super(UNetUp, self).__init__()
self.model = nn.Sequential(
nn.ConvTranspose2d(in_size, out_size, kernel_size=4,
stride=2, padding=1),
nn.InstanceNorm2d(out_size),
nn.ReLU(inplace=True)
)
def forward(self, x, skip_input=None):
if skip_input is not None:
x = torch.cat((x, skip_input), 1) # add the skip connection
x = self.model(x)
return x
class FinalLayer(nn.Module):
"""Final block of the U-Net.
Args:
in_size: (int) number of channels in the input image.
out_size : (int) number of channels in the output image.
"""
def __init__(self, in_size, out_size):
super(FinalLayer, self).__init__()
self.model = nn.Sequential(
nn.Upsample(scale_factor=2),
nn.Conv2d(in_size, out_size, kernel_size=3, padding=1),
nn.Tanh(),
)
def forward(self, x, skip_input=None):
if skip_input is not None:
x = torch.cat((x, skip_input), 1) # add the skip connection
x = self.model(x)
return x
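# %% [markdown]
# Before looking at the full generator, it may help to track how the tensor
# shapes evolve through these blocks. The sketch below assumes the 64×64
# inputs used in this tutorial and one possible naming of the blocks; shapes
# are written as (channels, height, width), omitting the batch dimension:
#
# ```python
# # x:  (1, 64, 64)
# # d1 = down1(x):  (64, 32, 32)
# # d2 = down2(d1): (128, 16, 16)
# # d3 = down3(d2): (256, 8, 8)
# # d4 = down4(d3): (512, 4, 4)
# # d5 = down5(d4): (512, 2, 2)
# # u1 = up1(d5):     (512, 4, 4)
# # u2 = up2(u1, d4): cat -> (1024, 4, 4),  output (256, 8, 8)
# # u3 = up3(u2, d3): cat -> (512, 8, 8),   output (128, 16, 16)
# # u4 = up4(u3, d2): cat -> (256, 16, 16), output (64, 32, 32)
# # final(u4, d1):    cat -> (128, 32, 32), upsampled and convolved to (1, 64, 64)
# ```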
# %%
class GeneratorUNet(nn.Module):
def __init__(self, in_channels=1, out_channels=1):
super(GeneratorUNet, self).__init__()
self.down1 = UNetDown(in_channels, 64)
self.down2 = UNetDown(64, 128)
self.down3 = UNetDown(128, 256)
self.down4 = UNetDown(256, 512)
self.down5 = UNetDown(512, 512)
self.up1 = UNetUp(512, 512)
self.up2 = UNetUp(1024, 256)
self.up3 = UNetUp(512, 128)
self.up4 = UNetUp(256, 64)
self.final = FinalLayer(128, 1)
def forward(self, x):
|
# %% [markdown]
# Let's have a look at the architecture of our generator:
# %%
# Summary of the generator
summary(GeneratorUNet().cuda(), (1, 64, 64))
# %% [markdown]
# ## 1.2 Train the generator
#
# In order to train the generator, we will repeat the following process:
#
# 1. Generate T2-w images from T1-w images.
# 2. Compute the error between the true T2-w images and the generated T2-w images.
# 3. Update the parameters of the generator.
#
# The training phase looks like this:
#
# ```
# # For each epoch
#
# # For each batch
#
# # Generate fake images for all the images in this batch
#
# # Compute the loss for the generator
#
# # Perform one optimization step
# ```
# %% [markdown]
# <div class="alert alert-block alert-info">
# <b>Exercise</b>: We provide below a template to train our generator
# on the dataset. Fill in the missing parts.
# </div>
# %%
def train_generator(train_loader, test_loader, num_epoch=500,
lr=0.0001, beta1=0.9, beta2=0.999):
"""Train a generator on its own.
Args:
train_loader: (DataLoader) a DataLoader wrapping the training dataset
test_loader: (DataLoader) a DataLoader wrapping the test dataset
num_epoch: (int) number of epochs performed during training
        lr: (float) learning rate of the generator Adam optimizer
        beta1: (float) beta1 coefficient of the generator Adam optimizer
        beta2: (float) beta2 coefficient of the generator Adam optimizer
Returns:
generator: (nn.Module) the trained generator
"""
    cuda = torch.cuda.is_available()
print(f"Using cuda device: {cuda}") # check if GPU is used
# Tensor type (put everything on GPU if possible)
Tensor = torch.cuda.FloatTensor if cuda else torch.FloatTensor
# Output folder
if not os.path.exists("./images/generator"):
os.makedirs("./images/generator")
# Loss function
criterion = torch.nn.L1Loss() # A loss for a voxel-wise comparison of images like torch.nn.L1Loss
# Initialize the generator
generator = GeneratorUNet()
if cuda:
generator = generator.cuda()
criterion.cuda()
# Optimizer
optimizer = torch.optim.Adam(generator.parameters(),
lr=lr, betas=(beta1, beta2))
def sample_images(epoch):
"""Saves a generated sample from the validation set"""
imgs = next(iter(test_loader))
real_A = imgs["T1"].type(Tensor)
real_B = imgs["T2"].type(Tensor)
fake_B = generator(real_A)
img_sample = torch.cat((real_A.data, fake_B.data, real_B.data), -2)
save_image(img_sample, f"./images/generator/epoch-{epoch}.png",
nrow=5, normalize=True)
# ----------
# Training
# ----------
prev_time = time.time()
for epoch in range(num_epoch):
for i, batch in enumerate(train_loader):
# Inputs T1-w and T2-w
real_t1 = batch["T1"].type(Tensor)
real_t2 = batch["T2"].type(Tensor)
# Remove stored gradients
optimizer.zero_grad()
# Generate fake T2 images from the true T1 images
fake_t2 = generator(real_t1)
# Compute the corresponding loss
loss = criterion(fake_t2, real_t2)
# Compute the gradient and perform one optimization step
loss.backward()
optimizer.step()
# --------------
# Log Progress
# --------------
# Determine approximate time left
batches_done = epoch * len(train_loader) + i
batches_left = num_epoch * len(train_loader) - batches_done
time_left = datetime.timedelta(
seconds=batches_left * (time.time() - prev_time))
prev_time = time.time()
# Print log
sys.stdout.write(
"\r[Epoch %d/%d] [Batch %d/%d] [Loss: %f] ETA: %s"
% (
epoch + 1,
num_epoch,
i,
len(train_loader),
loss.item(),
time_left,
)
)
# Save images at the end of each epoch
sample_images(epoch)
return generator
# %% {"tags": ["remove_output"]}
# Parameters for Adam optimizer
lr = 0.0002
beta1 = 0.5
beta2 = 0.999
# Create dataloaders
batch_size = 40
train_loader = DataLoader(IXIDataset(root, mode="train"),
batch_size=batch_size,
shuffle=True)
test_loader = DataLoader(IXIDataset(root, mode="test"),
batch_size=5,
shuffle=False)
# Number of epochs
num_epoch = 20
# Train the generator
generator = train_generator(train_loader, test_loader, num_epoch=num_epoch,
lr=lr, beta1=beta1, beta2=beta2)
# %% [markdown]
# ## 1.3 Evaluate the generator
#
# Let's visualize a few generated T2-weighted images:
# %%
im = img.imread(f'./images/generator/epoch-{num_epoch - 1}.png')
plt.figure(figsize=(20, 20))
plt.imshow(np.swapaxes(im, 0, 1))
plt.gca().invert_yaxis()
plt.show()
# %% [markdown]
# After doing visual quality control, it is a good idea to quantify the quality
# of the generated images using specific metrics. Some popular metrics include
# the Mean Absolute Error (MAE), the Peak Signal-to-Noise Ratio (PSNR) and
# the Structural Similarity index (SSIM):
#
# * MAE = $\displaystyle \frac{1}{nm} \sum_{i=1}^n \sum_{j=1}^m \vert T_{ij} - G_{ij} \vert $
#
# * PSNR = $\displaystyle -10 \log_{10} \left( \frac{1}{nm} \sum_{i=1}^n \sum_{j=1}^m (T_{ij} - G_{ij})^2 \right) $
#
# * SSIM = $\displaystyle \frac{(2 \mu_T \mu_G + C_1)(2 \sigma_{TG} + C_2)}{(\mu_T^2 +
# \mu_G^2 + C_1)(\sigma_T^2 + \sigma_G^2 + C_2)} $ where:
#
# * $\mu$ and $\sigma$ are the mean value and standard deviation of an image respectively, and
# * $C_1$ and $C_2$ are two positive constants (one can take $C_1=0.01$ and $C_2=0.03$).
#
# The [mean absolute error](https://en.wikipedia.org/wiki/Mean_absolute_error)
# is simply the mean of the absolute differences between
# the true pixels ($T_{ij}$) and the generated pixels ($G_{ij}$).
# The lower, the better. Minimum value is 0.
#
# The [peak signal-to-noise ratio](https://en.wikipedia.org/wiki/Peak_signal-to-noise_ratio)
# is a function of the mean squared error and allows for comparing
# images encoded with different scales. We simplified its formula in our case
# (this amounts to assuming a maximum pixel value of 1).
# The higher, the better. Maximum value is $+\infty$.
#
# The [structural similarity index](https://en.wikipedia.org/wiki/Structural_similarity)
# is a weighted combination of the luminance, the contrast and the structure.
# The higher, the better. Maximum value is 1.
#
# For those interested, you can find [here](https://www.pyimagesearch.com/2014/09/15/python-compare-two-images/)
# a reference to better understand the differences between these metrics.
#
# We provide an implementation for each metric with the functions below.
# %%
def mean_absolute_error(image_true, image_generated):
"""Compute mean absolute error.
Args:
image_true: (Tensor) true image
image_generated: (Tensor) generated image
Returns:
        mae: (float) mean absolute error
"""
return torch.abs(image_true - image_generated).mean()
def peak_signal_to_noise_ratio(image_true, image_generated):
""""Compute peak signal-to-noise ratio.
Args:
image_true: (Tensor) true image
image_generated: (Tensor) generated image
Returns:
psnr: (float) peak signal-to-noise ratio"""
mse = ((image_true - image_generated) ** 2).mean().cpu()
return -10 * np.log10(mse)
def structural_similarity_index(image_true, image_generated, C1=0.01, C2=0.03):
"""Compute structural similarity index.
Args:
image_true: (Tensor) true image
image_generated: (Tensor) generated image
C1: (float) variable to stabilize the denominator
C2: (float) variable to stabilize the denominator
Returns:
        ssim: (float) structural similarity index"""
mean_true = image_true.mean()
mean_generated = image_generated.mean()
std_true = image_true.std()
std_generated = image_generated.std()
covariance = (
(image_true - mean_true) * (image_generated - mean_generated)).mean()
numerator = (2 * mean_true * mean_generated + C1) * (2 * covariance + C2)
denominator = ((mean_true ** 2 + mean_generated ** 2 + C1) *
(std_true ** 2 + std_generated ** 2 + C2))
return numerator / denominator
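# %% [markdown]
# As a quick sanity check of these implementations, you can compare a random
# tensor with itself (the shapes below are purely illustrative): the MAE is
# exactly 0, the SSIM is very close to 1 (not exactly 1, because `std` uses
# the unbiased variance estimator while the covariance above is biased), and
# the PSNR diverges since the mean squared error is 0.
#
# ```python
# x = torch.rand(1, 1, 64, 64)
# y = torch.rand(1, 1, 64, 64)
#
# mean_absolute_error(x, x)           # tensor(0.)
# structural_similarity_index(x, x)   # ~ tensor(1.)
#
# mean_absolute_error(x, y)           # > 0
# peak_signal_to_noise_ratio(x, y)    # some finite value
# structural_similarity_index(x, y)   # < 1
# ```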
# %% [markdown]
# We will now evaluate the generator with these three metrics on both the
# training set and the test set by computing the mean value for each metric.
# %%
def evaluate_generator(generator):
"""Evaluate a generator.
Args:
generator: (GeneratorUNet) neural network generating T2-w images
"""
res_train, res_test = [], []
    cuda = torch.cuda.is_available()
Tensor = torch.cuda.FloatTensor if cuda else torch.FloatTensor
with torch.no_grad():
for i, batch in enumerate(train_loader):
# Inputs T1-w and T2-w
real_t1 = batch["T1"].type(Tensor)
real_t2 = batch["T2"].type(Tensor)
fake_t2 = generator(real_t1)
mae = mean_absolute_error(real_t2, fake_t2).item()
psnr = peak_signal_to_noise_ratio(real_t2, fake_t2).item()
ssim = structural_similarity_index(real_t2, fake_t2).item()
res_train.append([mae, psnr, ssim])
for i, batch in enumerate(test_loader):
# Inputs T1-w and T2-w
real_t1 = batch["T1"].type(Tensor)
real_t2 = batch["T2"].type(Tensor)
fake_t2 = generator(real_t1)
mae = mean_absolute_error(real_t2, fake_t2).item()
psnr = peak_signal_to_noise_ratio(real_t2, fake_t2).item()
ssim = structural_similarity_index(real_t2, fake_t2).item()
res_test.append([mae, psnr, ssim])
df = pd.DataFrame([
pd.DataFrame(res_train, columns=['MAE', 'PSNR', 'SSIM']).mean().squeeze(),
pd.DataFrame(res_test, columns=['MAE', 'PSNR', 'SSIM']).mean().squeeze()
], index=['Training set', 'Test set']).T
return df
# %%
evaluate_generator(generator)
# %% [markdown]
# The performance is already very good! The task may simply be quite easy.
# Let's see if we can still improve the performance with a more complex neural
# network.
# %% [markdown]
# # 2. Conditional Generative Adversarial Network (cGAN)
#
# A generative adversarial network (GAN) is a network generating new samples.
# A typical GAN consists of two networks:
#
# * a **generator** that generates new samples, and
# * a **discriminator** that discriminates generated samples from true samples.
#
# One can think of the generator as a *counterfeiter* and the discriminator
# as an *authenticator*.
# The discriminator pushes the generator to improve by providing opposition.
# The discriminator must not be too good, otherwise the generator won't improve.
# The generator and the discriminator are trained simultaneously and help
# each other improve.
#
# A conditional generative adversarial network (cGAN) is a particular case
# of a GAN that is conditioned by the true sample.
# A conditional GAN can thus only be used when **paired samples** are available.
# %% [markdown]
# ## 2.1 Architecture of the cGAN
#
# Like a GAN, a cGAN has two networks:
#
# * a **generator** that generates new samples, and
#
# * a **discriminator** that discriminates generated samples from true samples.
#
# We will keep the same architecture for the generator.
#
# For the discriminator we will use a **two-dimensional convolutional neural
# network** with 5 layers:
#
# * the first 4 layers are 2D-convolutional layers with a LeakyReLU activation
# function;
#
# * the last layer is a 2D-convolutional layer.
#
# The parameters for each layer are given in the figure below. Don't forget
# that the input of the discriminator will be the generated image and the true
# image since we are using a conditional GAN. Therefore, the number of input
# channels for the first layer will be two (one for each image).
# %% [markdown]
# <a href="https://ibb.co/9b2jF0V">
# <img src="https://i.ibb.co/hBHvPNZ/Capture-d-cran-2020-03-02-16-04-14.png" width="800" border="0">
# </a>
# %% [markdown]
# <div class="alert alert-block alert-info">
# <b>Exercise</b>: Create a <code>Discriminator</code> class to define the
# discriminator with the architecture given above.
# </div>
# %%
# We provide a function to generate a block for the given architecture.
def discriminator_block(in_filters, out_filters):
"""Return downsampling layers of each discriminator block"""
layers = [nn.Conv2d(in_filters, out_filters, 3, stride=2, padding=1)]
layers.append(nn.LeakyReLU(0.2, inplace=True))
return nn.Sequential(*layers)
class Discriminator(nn.Module):
def __init__(self, in_channels=1):
super(Discriminator, self).__init__()
self.layers = nn.Sequential(
discriminator_block(in_channels*2, 64),
discriminator_block(64, 128),
discriminator_block(128, 256),
discriminator_block(256, 512),
nn.Conv2d(512, 1, 1, stride=4, padding=0),
)
def forward(self, img_A, img_B):
# Concatenate image and condition image by channels to produce input
img_input = torch.cat((img_A, img_B), 1)
return self.layers(img_input)
# %% [markdown]
# Let's have a look at the architecture of our discriminator:
# %%
# Summary of the discriminator
summary(Discriminator().cuda(), [(1, 64, 64), (1, 64, 64)])
# %% [markdown]
# ## 2.2 Training our conditional GAN
#
# Now that we have created our generator and our discriminator, we have to
# train them on the dataset.
#
# **Notations**
#
# * $X_{T1}$: true T1 image;
# * $X_{T2}$: true T2 image;
# * $\tilde{X}_{T2}$: generated T2 image from $X_{T1}$;
# * $\hat{y}_{X}$: probability returned by the discriminator that $X_{T2}$ is real;
# * $\hat{y}_{\tilde{X}}$: probability returned by the discriminator that $\tilde{X}_{T2}$ is real.
#
# **Training the generator**
#
# The loss for the generator is the sum of:
#
# * the binary cross-entropy loss between the predicted probabilities of the
# generated images and positive labels,
# * the pixel-wise mean absolute error between the generated image and the true
# image.
#
# For one sample, it is then:
#
# $$
# \ell_G = - \log(\hat{y}_{\tilde{X}}) + \lambda * \text{MAE}(X_{T2}, \tilde{X}_{T2})
# $$
#
# **Training the discriminator**
#
# The loss for the discriminator is the mean of:
#
# * the binary cross-entropy loss between the predicted probabilities of the
# generated images and negative labels,
# * the binary cross-entropy loss between the predicted probabilities
# of the true images and positive labels.
#
# For one sample, it is then:
#
# $$
# \ell_D = - 0.5 * \log(\hat{y}_{X}) - 0.5 * \log(1 - \hat{y}_{\tilde{X}})
# $$
#
# **Training phase**
#
# The generator and the discriminator are trained simultaneously, which makes
# the training phase look like this:
#
# ```
# # For each epoch
#
# # For each batch
#
# # Generate fake images for all the images in this batch
#
# # Compute the loss for the generator and perform one optimization step
#
# # Compute the loss for the discriminator and perform one optimization step
# ```
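# %% [markdown]
# Note that the $-\log$ terms above map directly onto
# `torch.nn.BCEWithLogitsLoss` evaluated against constant label tensors.
# A minimal sketch (the tensor names are illustrative and the discriminator
# is assumed to output one logit per image):
#
# ```python
# criterion_GAN = torch.nn.BCEWithLogitsLoss()
# valid = torch.ones(batch_size, 1, 1, 1)   # labels for "real"
# fake = torch.zeros(batch_size, 1, 1, 1)   # labels for "generated"
#
# # Generator term: -log(y_hat_fake)
# loss_GAN = criterion_GAN(discriminator(fake_t2, real_t1), valid)
#
# # Discriminator terms: -0.5 * (log(y_hat_real) + log(1 - y_hat_fake))
# loss_discriminator = 0.5 * (
#     criterion_GAN(discriminator(real_t2, real_t1), valid)
#     + criterion_GAN(discriminator(fake_t2.detach(), real_t1), fake)
# )
# ```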
# %% [markdown]
# <div class="alert alert-block alert-info">
# <b>Exercise</b>: We provide below a template to train our conditional GAN
# on the dataset. Fill in the missing parts.
# </div>
# %%
def train_cgan(train_loader, test_loader, num_epoch=500,
lr=0.0001, beta1=0.9, beta2=0.999):
"""Train a conditional GAN.
Args:
        train_loader: (DataLoader) a DataLoader wrapping the training dataset
        test_loader: (DataLoader) a DataLoader wrapping the test dataset
num_epoch: (int) number of epochs performed during training
lr: (float) learning rate of the discriminator and generator Adam optimizers
beta1: (float) beta1 coefficient of the discriminator and generator Adam optimizers
        beta2: (float) beta2 coefficient of the discriminator and generator Adam optimizers
Returns:
generator: (nn.Module) the trained generator
"""
    cuda = torch.cuda.is_available()
print(f"Using cuda device: {cuda}") # check if GPU is used
# Tensor type (put everything on GPU if possible)
Tensor = torch.cuda.FloatTensor if cuda else torch.FloatTensor
# Output folder
if not os.path.exists("./images/cgan"):
os.makedirs("./images/cgan")
# Loss functions
criterion_GAN = torch.nn.BCEWithLogitsLoss() # A loss adapted to binary classification like torch.nn.BCEWithLogitsLoss
criterion_pixelwise = torch.nn.L1Loss() # A loss for a voxel-wise comparison of images like torch.nn.L1Loss
lambda_GAN = 1. # Weights criterion_GAN in the generator loss
lambda_pixel = 1. # Weights criterion_pixelwise in the generator loss
# Initialize generator and discriminator
generator = GeneratorUNet()
discriminator = Discriminator()
if cuda:
generator = generator.cuda()
discriminator = discriminator.cuda()
criterion_GAN.cuda()
criterion_pixelwise.cuda()
# Optimizers
optimizer_generator = torch.optim.Adam(
generator.parameters(), lr=lr, betas=(beta1, beta2))
optimizer_discriminator = torch.optim.Adam(
discriminator.parameters(), lr=lr, betas=(beta1, beta2))
def sample_images(epoch):
"""Saves a generated sample from the validation set"""
imgs = next(iter(test_loader))
real_t1 = imgs["T1"].type(Tensor)
real_t2 = imgs["T2"].type(Tensor)
fake_t2 = generator(real_t1)
img_sample = torch.cat((real_t1.data, fake_t2.data, real_t2.data), -2)
save_image(img_sample, f"./images/cgan/epoch-{epoch}.png",
nrow=5, normalize=True)
# ----------
# Training
# ----------
prev_time = time.time()
for epoch in range(num_epoch):
for i, batch in enumerate(train_loader):
# Inputs T1-w and T2-w
real_t1 = batch["T1"].type(Tensor)
real_t2 = batch["T2"].type(Tensor)
# Create labels
valid = Tensor(np.ones((real_t2.size(0), 1, 1, 1)))
fake = Tensor(np.zeros((real_t2.size(0), 1, 1, 1)))
# -----------------
# Train Generator
# -----------------
optimizer_generator.zero_grad()
# GAN loss
fake_t2 = generator(real_t1)
pred_fake = discriminator(fake_t2, real_t1)
loss_GAN = criterion_GAN(pred_fake, valid)
# L1 loss
loss_pixel = criterion_pixelwise(fake_t2, real_t2)
# Total loss
loss_generator = lambda_GAN * loss_GAN + lambda_pixel * loss_pixel
# Compute the gradient and perform one optimization step
loss_generator.backward()
optimizer_generator.step()
# ---------------------
# Train Discriminator
# ---------------------
optimizer_discriminator.zero_grad()
# Real loss
pred_real = discriminator(real_t2, real_t1)
loss_real = criterion_GAN(pred_real, valid)
# Fake loss
fake_t2 = generator(real_t1)
pred_fake = discriminator(fake_t2.detach(), real_t1)
loss_fake = criterion_GAN(pred_fake, fake)
# Total loss
loss_discriminator = 0.5 * (loss_real + loss_fake)
# Compute the gradient and perform one optimization step
loss_discriminator.backward()
optimizer_discriminator.step()
# --------------
# Log Progress
# --------------
# Determine approximate time left
batches_done = epoch * len(train_loader) + i
batches_left = num_epoch * len(train_loader) - batches_done
time_left = datetime.timedelta(
seconds=batches_left * (time.time() - prev_time))
prev_time = time.time()
# Print log
sys.stdout.write(
"\r[Epoch %d/%d] [Batch %d/%d] [D loss: %f] "
"[G loss: %f, pixel: %f, adv: %f] ETA: %s"
% (
epoch + 1,
num_epoch,
i,
len(train_loader),
loss_discriminator.item(),
loss_generator.item(),
loss_pixel.item(),
loss_GAN.item(),
time_left,
)
)
# Save images at the end of each epoch
sample_images(epoch)
return generator
# %% {"tags": ["remove_output"]}
generator_cgan = train_cgan(train_loader, test_loader, num_epoch=num_epoch,
lr=lr, beta1=beta1, beta2=beta2)
# %% [markdown]
# ## 2.3 Evaluating the generator of our cGAN
#
# Let's visualize a few generated T2-weighted images:
# %%
plt.figure(figsize=(20, 20))
im = img.imread(f'./images/cgan/epoch-{num_epoch - 1}.png')
plt.imshow(np.swapaxes(im, 0, 1))
plt.gca().invert_yaxis()
plt.show()
# %% [markdown]
# We will now evaluate the generator of the cGAN with the same three metrics
# on both the training set and the test set by computing the mean value for
# each metric:
# %%
evaluate_generator(generator_cgan)
# %% [markdown]
# The performance is slightly lower for the cGAN, which is a bit disappointing.
# This may be explained by the ease of the task: since a generator alone
# already performs well, increasing the complexity of the architecture does
# not help. Another possibility would be to increase the number of epochs,
# which we kept identical to the setting used for the generator alone.
#
# Let's now try a more general approach that does not require paired samples.
# %% [markdown]
# # 3. CycleGAN
# A cycle generative adversarial network (CycleGAN) is a technique for training
# **unsupervised image translation** models via the GAN architecture using
# unpaired collections of images from two different domains.
# The main innovation of a CycleGAN is to introduce a
# **cycle consistency loss** to enforce good reconstruction in both domains.
#
# A CycleGAN consists of two GANs:
#
# * one generating samples from domain *A* to domain *B*, and
#
# * another one generating from domain *B* to domain *A*.
#
# Cycle consistency consists in translating a sample from one domain to the
# other, translating it back to the first domain, and comparing the
# reconstructed sample with the original sample.
# The image below (taken from the [original paper introducing CycleGAN](https://arxiv.org/pdf/1703.10593.pdf))
# summarizes the main concepts of a CycleGAN:
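# %% [markdown]
# In code, the cycle consistency term boils down to translating a sample to
# the other domain, translating it back, and penalizing the reconstruction
# error, e.g. with an L1 loss. A minimal sketch for one direction (the
# generator names are illustrative):
#
# ```python
# l1 = torch.nn.L1Loss()
#
# # T1 -> T2 -> T1 cycle
# fake_t2 = generator_from_t1_to_t2(real_t1)
# reconstructed_t1 = generator_from_t2_to_t1(fake_t2)
# loss_cycle_t1 = l1(reconstructed_t1, real_t1)
# ```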
# %% [markdown]
# <a href="https://ibb.co/WtS49wR">
# <img src="https://i.ibb.co/2NX12Qp/cyclegan.png" width="800" border="0">
# </a>
# %% [markdown]
# ## 3.1 Architecture of the CycleGAN
#
# The discriminators of the CycleGAN do not take the true sample as input.
# Thus, we have to remove the true sample from the `forward` method.
#
# Otherwise, we will use the same architectures for the generators and for
# the discriminators as the ones from the conditional GAN.
# %%
class DiscriminatorCycle(nn.Module):
def __init__(self, in_channels=1):
super(DiscriminatorCycle, self).__init__()
layers = []
layers.extend(discriminator_block(in_channels, 64))
layers.extend(discriminator_block(64, 128))
layers.extend(discriminator_block(128, 256))
layers.extend(discriminator_block(256, 512))
layers.append(nn.Conv2d(512, 1, 4, padding=0))
self.model = nn.Sequential(*layers)
def forward(self, img):
return self.model(img)
# %% [markdown]
# Let's have a look at the architecture of one discriminator in our CycleGAN:
# %%
# Summary of one discriminator in the CycleGAN
summary(DiscriminatorCycle().cuda(), [(1, 64, 64)])
# %% [markdown]
# ## 3.2 Training the CycleGAN
# The generators and the discriminators are trained simultaneously, which makes
# the training phase look like this:
#
# ```
# # For each epoch
#
# # For each batch
#
# # T1 -> T2 -> T1 cycle
#
# ## Generate fake T2-weighted images for all the T1-weighted images in this batch
#
# ## Generate fake T1-weighted images for all the fake generated T2-weighted images in this batch
#
# ## Compute the loss for both generators and perform one optimization step
#
# ## Compute the loss for both discriminators and perform one optimization step
#
# # T2 -> T1 -> T2 cycle
#
# ## Generate fake T1-weighted images for all the T2-weighted images in this batch
#
# ## Generate fake T2-weighted images for all the fake generated T1-weighted images in this batch
#
# ## Compute the loss for both generators and perform one optimization step
#
# ## Compute the loss for both discriminators and perform one optimization step
# ```
# %% [markdown]
# <div class="alert alert-block alert-info">
# <b>Exercise</b>: We provide below a template to train our cycle GAN
# on the dataset. Fill in the missing parts.
# </div>
# %%
def train_cyclegan(train_loader, test_loader, num_epoch=500,
lr=0.0001, beta1=0.9, beta2=0.999):
"""Train a CycleGAN.
Args:
        train_loader: (DataLoader) a DataLoader wrapping the training dataset
        test_loader: (DataLoader) a DataLoader wrapping the test dataset
num_epoch: (int) number of epochs performed during training
lr: (float) learning rate of the discriminator and generator Adam optimizers
beta1: (float) beta1 coefficient of the discriminator and generator Adam optimizers
        beta2: (float) beta2 coefficient of the discriminator and generator Adam optimizers
Returns:
generator: (nn.Module) the generator generating T2-w images from T1-w images.
"""
    cuda = torch.cuda.is_available()
print(f"Using cuda device: {cuda}") # check if GPU is used
# Tensor type (put everything on GPU if possible)
Tensor = torch.cuda.FloatTensor if cuda else torch.FloatTensor
# Output folder
if not os.path.exists("./images/cyclegan"):
os.makedirs("./images/cyclegan")
# Loss functions
criterion_GAN_from_t1_to_t2 = torch.nn.BCEWithLogitsLoss() # A loss adapted to binary classification like torch.nn.BCEWithLogitsLoss
criterion_GAN_from_t2_to_t1 = torch.nn.BCEWithLogitsLoss() # A loss adapted to binary classification like torch.nn.BCEWithLogitsLoss
criterion_pixelwise_from_t1_to_t2 = torch.nn.L1Loss() # A loss for a voxel-wise comparison of images like torch.nn.L1Loss
criterion_pixelwise_from_t2_to_t1 = torch.nn.L1Loss() # A loss for a voxel-wise comparison of images like torch.nn.L1Loss
lambda_GAN = 1. # Weights criterion_GAN in the generator loss
lambda_pixel = 1. # Weights criterion_pixelwise in the generator loss
# Initialize generators and discriminators
generator_from_t1_to_t2 = GeneratorUNet()
generator_from_t2_to_t1 = GeneratorUNet()
discriminator_from_t1_to_t2 = DiscriminatorCycle()
discriminator_from_t2_to_t1 = DiscriminatorCycle()
if cuda:
generator_from_t1_to_t2 = generator_from_t1_to_t2.cuda()
generator_from_t2_to_t1 = generator_from_t2_to_t1.cuda()
discriminator_from_t1_to_t2 = discriminator_from_t1_to_t2.cuda()
discriminator_from_t2_to_t1 = discriminator_from_t2_to_t1.cuda()
criterion_GAN_from_t1_to_t2 = criterion_GAN_from_t1_to_t2.cuda()
criterion_GAN_from_t2_to_t1 = criterion_GAN_from_t2_to_t1.cuda()
criterion_pixelwise_from_t1_to_t2 = criterion_pixelwise_from_t1_to_t2.cuda()
criterion_pixelwise_from_t2_to_t1 = criterion_pixelwise_from_t2_to_t1.cuda()
# Optimizers
optimizer_generator_from_t1_to_t2 = torch.optim.Adam(
generator_from_t1_to_t2.parameters(), lr=lr, betas=(beta1, beta2))
optimizer_generator_from_t2_to_t1 = torch.optim.Adam(
generator_from_t2_to_t1.parameters(), lr=lr, betas=(beta1, beta2))
optimizer_discriminator_from_t1_to_t2 = torch.optim.Adam(
discriminator_from_t1_to_t2.parameters(), lr=lr, betas=(beta1, beta2))
optimizer_discriminator_from_t2_to_t1 = torch.optim.Adam(
discriminator_from_t2_to_t1.parameters(), lr=lr, betas=(beta1, beta2))
def sample_images(epoch):
"""Saves a generated sample from the validation set"""
imgs = next(iter(test_loader))
real_t1 = imgs["T1"].type(Tensor)
real_t2 = imgs["T2"].type(Tensor)
fake_t2 = generator_from_t1_to_t2(real_t1)
img_sample = torch.cat((real_t1.data, fake_t2.data, real_t2.data), -2)
save_image(img_sample, f"./images/cyclegan/epoch-{epoch}.png",
nrow=5, normalize=True)
# ----------
# Training
# ----------
prev_time = time.time()
for epoch in range(num_epoch):
for i, batch in enumerate(train_loader):
# Inputs T1-w and T2-w
real_t1 = batch["T1"].type(Tensor)
real_t2 = batch["T2"].type(Tensor)
# Create labels
valid_t1 = Tensor(np.ones((real_t1.size(0), 1, 1, 1)))
imitation_t1 = Tensor(np.zeros((real_t1.size(0), 1, 1, 1)))
valid_t2 = Tensor(np.ones((real_t2.size(0), 1, 1, 1)))
imitation_t2 = Tensor(np.zeros((real_t2.size(0), 1, 1, 1)))
# ------------------
# Train Generators
# ------------------
optimizer_generator_from_t1_to_t2.zero_grad()
optimizer_generator_from_t2_to_t1.zero_grad()
# GAN loss
fake_t2 = generator_from_t1_to_t2(real_t1)
pred_fake_t2 = discriminator_from_t1_to_t2(fake_t2)
loss_GAN_from_t1_to_t2 = criterion_GAN_from_t1_to_t2(pred_fake_t2, valid_t2)
fake_t1 = generator_from_t2_to_t1(real_t2)
pred_fake_t1 = discriminator_from_t2_to_t1(fake_t1)
loss_GAN_from_t2_to_t1 = criterion_GAN_from_t2_to_t1(pred_fake_t1, valid_t1)
# L1 loss
fake_fake_t1 = generator_from_t2_to_t1(fake_t2)
loss_pixel_from_t1_to_t2 = criterion_pixelwise_from_t1_to_t2(fake_fake_t1, real_t1)
fake_fake_t2 = generator_from_t1_to_t2(fake_t1)
loss_pixel_from_t2_to_t1 = criterion_pixelwise_from_t2_to_t1(fake_fake_t2, real_t2)
# Total loss
loss_generator_from_t1_to_t2 = (lambda_GAN * loss_GAN_from_t1_to_t2 +
lambda_pixel * loss_pixel_from_t1_to_t2)
loss_generator_from_t2_to_t1 = (lambda_GAN * loss_GAN_from_t2_to_t1 +
lambda_pixel * loss_pixel_from_t2_to_t1)
loss_generator_from_t1_to_t2.backward()
loss_generator_from_t2_to_t1.backward()
optimizer_generator_from_t1_to_t2.step()
optimizer_generator_from_t2_to_t1.step()
# ----------------------
# Train Discriminators
# ----------------------
optimizer_discriminator_from_t1_to_t2.zero_grad()
optimizer_discriminator_from_t2_to_t1.zero_grad()
# Real loss
pred_real_t2 = discriminator_from_t1_to_t2(real_t2)
loss_real_t2 = criterion_GAN_from_t1_to_t2(pred_real_t2, valid_t2)
pred_real_t1 = discriminator_from_t2_to_t1(real_t1)
loss_real_t1 = criterion_GAN_from_t2_to_t1(pred_real_t1, valid_t1)
# Fake loss
fake_t2 = generator_from_t1_to_t2(real_t1)
pred_fake_t2 = discriminator_from_t1_to_t2(fake_t2.detach())
loss_fake_t2 = criterion_GAN_from_t1_to_t2(pred_fake_t2, imitation_t2)
fake_t1 = generator_from_t2_to_t1(real_t2)
pred_fake_t1 = discriminator_from_t2_to_t1(fake_t1.detach())
loss_fake_t1 = criterion_GAN_from_t2_to_t1(pred_fake_t1, imitation_t1)
# Total loss
loss_discriminator_from_t1_to_t2 = 0.5 * (loss_real_t2 + loss_fake_t2)
loss_discriminator_from_t2_to_t1 = 0.5 * (loss_real_t1 + loss_fake_t1)
loss_discriminator_from_t1_to_t2.backward()
loss_discriminator_from_t2_to_t1.backward()
optimizer_discriminator_from_t1_to_t2.step()
optimizer_discriminator_from_t2_to_t1.step()
# --------------
# Log Progress
# --------------
# Determine approximate time left
batches_done = epoch * len(train_loader) + i
batches_left = num_epoch * len(train_loader) - batches_done
time_left = datetime.timedelta(
seconds=batches_left * (time.time() - prev_time))
prev_time = time.time()
# Print log
sys.stdout.write(
"\r[Epoch %d/%d] [Batch %d/%d] "
"[Generator losses: %f, %f] "
"[Discriminator losses: %f, %f] "
"ETA: %s"
% (
epoch + 1,
num_epoch,
i,
len(train_loader),
loss_generator_from_t1_to_t2.item(),
loss_generator_from_t2_to_t1.item(),
loss_discriminator_from_t1_to_t2.item(),
loss_discriminator_from_t2_to_t1.item(),
time_left,
)
)
# Save images at the end of each epoch
sample_images(epoch)
return generator_from_t1_to_t2
# %% {"tags": ["remove_output"]}
generator_cyclegan = train_cyclegan(
train_loader, test_loader, num_epoch=num_epoch,
lr=lr, beta1=beta1, beta2=beta2
)
# %% [markdown]
# ## 3.3 Evaluating the generator of our CycleGAN
#
# Let's visualize a few generated T2-weighted images:
# %%
plt.figure(figsize=(20, 20))
im = img.imread(f'./images/cyclegan/epoch-{num_epoch - 1}.png')
plt.imshow(np.swapaxes(im, 0, 1))
plt.gca().invert_yaxis()
plt.show()
# %% [markdown]
# We will now evaluate the generator of the CycleGAN with the same three
# metrics on both the training set and the test set by computing the mean value
# for each metric.
# %%
evaluate_generator(generator_cyclegan)
# %% [markdown]
# You should obtain a lower performance for the CycleGAN, which is not so
# surprising since this task is unsupervised whereas the two other tasks are
# supervised.
#
# This does not mean that CycleGANs are not useful in practice.
# Datasets of unpaired samples are much more common than datasets of paired
# samples. Here is an example of turning a photograph into a painting in the
# style of a famous painter using a CycleGAN
# (taken from the [original paper introducing CycleGAN](https://arxiv.org/pdf/1703.10593.pdf)):
# %% [markdown]
# <a href="https://ibb.co/Q8FrRrd">
# <img src="https://i.ibb.co/6vRgxgB/style-transfer.png" width="800" border="0">
# </a>
| d1 = self.down1(x)
d2 = self.down2(d1)
d3 = self.down3(d2)
d4 = self.down4(d3)
d5 = self.down5(d4)
u1 = self.up1(d5)
u2 = self.up2(u1, d4)
u3 = self.up3(u2, d3)
u4 = self.up4(u3, d2)
return self.final(u4, d1) |
max-concurrency.js | "use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.maxConcurrency = void 0;
const debug_1 = __importDefault(require("debug"));
const debug = debug_1.default("agenda:maxConcurrency");
/**
 * Set the concurrency for jobs (globally); the job type does not matter
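 * @example
 * // Illustrative usage: allow at most 20 jobs to run at once
 * agenda.maxConcurrency(20);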
* @name Agenda#maxConcurrency | const maxConcurrency = function (concurrency) {
debug("Agenda.maxConcurrency(%d)", concurrency);
this._maxConcurrency = concurrency;
return this;
};
exports.maxConcurrency = maxConcurrency;
//# sourceMappingURL=max-concurrency.js.map | * @function
* @param concurrency max concurrency value
* @returns agenda instance
*/ |
layer-metadata-detail.js | import React from 'react';
import PropTypes from 'prop-types';
import { connect } from 'react-redux';
import { ListGroup, ListGroupItem, Button } from 'reactstrap';
import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
import util from '../../../../util/util';
import { dateOverlap } from '../../../../modules/layers/util';
import {
addLayer as addLayerAction,
removeLayer as removeLayerAction,
} from '../../../../modules/layers/actions';
import {
selectLayer as selectLayerAction,
} from '../../../../modules/product-picker/actions';
import { getActiveLayersMap } from '../../../../modules/layers/selectors';
import RenderSplitLayerTitle from '../renderSplitTitle';
import RecentLayersInfo from '../browse/recent-layers-info';
class LayerMetadataDetail extends React.Component {
constructor(props) {
super(props);
this.state = {
isDateRangesExpanded: false,
};
this.toggleLayer = this.toggleLayer.bind(this);
}
toggleLayer() {
const {
addLayer, removeLayer, isActive, layer,
} = this.props;
if (isActive) {
removeLayer(layer.id);
} else {
addLayer(layer.id);
}
}
/**
   * Toggle the expanded state of the layer date ranges list
   * @method toggleDateRanges
* @param {e} event
* @return {void}
*/
toggleDateRanges(e) {
this.setState((prevState) => ({ isDateRangesExpanded: !prevState.isDateRangesExpanded }));
}
getListItems = (layer) => layer.dateRanges
.slice(0)
.reverse()
.map((l) => {
let listItemStartDate;
let listItemEndDate;
if (l.startDate) {
listItemStartDate = util.coverageDateFormatter('START-DATE', l.startDate, layer.period);
}
if (l.endDate) {
listItemEndDate = util.coverageDateFormatter('END-DATE', l.endDate, layer.period);
}
return (
<ListGroupItem key={`${l.startDate} - ${l.endDate}`}>
{`${listItemStartDate} - ${listItemEndDate}`}
</ListGroupItem>
);
});
/**
* Return text with the temporal range based on layer start
* and end dates
* @param {object} layer the layer object
* @return {string} Return a string with temporal range information
*/
dateRangeText = (layer) => {
const {
endDate,
id,
inactive,
period,
startDate,
} = layer;
let layerStartDate;
let startDateId;
let layerEndDate;
let endDateId;
if (startDate) {
startDateId = `${id}-startDate`;
layerStartDate = util.coverageDateFormatter('START-DATE', startDate, period);
}
if (endDate) {
endDateId = `${id}-endDate`;
layerEndDate = util.parseDate(endDate);
if (layerEndDate <= util.today() && !inactive) {
layerEndDate = 'Present';
} else {
layerEndDate = util.coverageDateFormatter('END-DATE', endDate, period);
}
} else {
layerEndDate = 'Present';
}
return `Temporal coverage:
<span class="layer-date-start" id='${startDateId}'> ${layerStartDate} </span> -
<span class="layer-end-date" id='${endDateId}'> ${layerEndDate} </span>`;
}
renderLayerDates() {
const { layer } = this.props;
const { isDateRangesExpanded } = this.state;
let listItems;
let dateRanges;
if (layer.dateRanges && layer.dateRanges.length > 1) {
dateRanges = dateOverlap(layer.period, layer.dateRanges);
if (dateRanges.overlap === false) {
listItems = this.getListItems(layer);
}
}
return (
<>
{layer.startDate && (
<p className="layer-date-range">
<span dangerouslySetInnerHTML={{ __html: this.dateRangeText(layer) }} />
{layer.dateRanges
&& layer.dateRanges.length > 1
&& dateRanges.overlap === false && (
<a
id="layer-date-ranges-button"
title="View all date ranges"
className="layer-date-ranges-button"
onClick={(e) => this.toggleDateRanges(e)}
>
<sup>
{isDateRangesExpanded ? ' *Hide ' : ' *Show '}
Dates
</sup>
</a>
)}
</p>
)}
{isDateRangesExpanded && listItems && (
<div className="layer-date-wrap">
<p>Date Ranges:</p>
<ListGroup className="layer-date-ranges">{listItems}</ListGroup>
</div>
)}
</>
);
}
renderNoSelection() {
const { categoryType } = this.props;
return categoryType === 'recent'
? (<RecentLayersInfo />)
: (
<div className="no-results">
<FontAwesomeIcon icon="globe-americas" />
<h3> No layer selected. </h3>
<h5> Select a layer to view details here!</h5>
</div>
);
}
render() {
const {
layer, selectedProjection, isActive, showPreviewImage,
} = this.props;
if (!layer) {
return this.renderNoSelection();
}
const { metadata } = layer;
const previewUrl = `images/layers/previews/${selectedProjection}/${layer.id}.jpg`;
const buttonText = isActive ? 'Remove Layer' : 'Add Layer';
const btnClass = isActive ? 'add-to-map-btn text-center is-active' : 'add-to-map-btn text-center';
const btnIconClass = isActive ? 'minus' : 'plus';
return (
<div className="layers-all-layer">
<div className="layers-all-header">
<RenderSplitLayerTitle layer={layer} />
{/*
<Button className="close-details" onClick={() => selectLayer(null)}>
<FontAwesomeIcon icon="chevron-down" />
</Button>
*/}
</div>
{showPreviewImage
&& (
<div className="text-center">
<a href={previewUrl} rel="noopener noreferrer" target="_blank">
<img className="img-fluid layer-preview" src={previewUrl} />
</a>
</div>
)}
<div className="text-center">
<Button className={btnClass} onClick={this.toggleLayer}>
<FontAwesomeIcon icon={btnIconClass} />
{buttonText}
</Button>
</div>
<div className="source-metadata visible">
{this.renderLayerDates()}
<div dangerouslySetInnerHTML={{ __html: metadata }} />
</div>
</div>
);
}
}
LayerMetadataDetail.propTypes = {
addLayer: PropTypes.func,
categoryType: PropTypes.string,
isActive: PropTypes.bool,
layer: PropTypes.object,
removeLayer: PropTypes.func,
selectedProjection: PropTypes.string, |
const mapStateToProps = (state) => {
const {
productPicker,
proj,
config,
} = state;
const { selectedLayer, categoryType } = productPicker;
const activeLayers = getActiveLayersMap(state);
const isActive = selectedLayer && !!activeLayers[selectedLayer.id];
return {
layer: selectedLayer,
isActive,
categoryType,
selectedProjection: proj.id,
showPreviewImage: config.features.previewSnapshots,
};
};
const mapDispatchToProps = (dispatch) => ({
addLayer: (id) => {
dispatch(addLayerAction(id));
},
removeLayer: (id) => {
dispatch(removeLayerAction(id));
},
selectLayer: (layer) => {
dispatch(selectLayerAction(layer));
},
});
export default connect(
mapStateToProps,
mapDispatchToProps,
)(LayerMetadataDetail); | showPreviewImage: PropTypes.bool,
}; |
lib.rs | #![doc(html_root_url = "https://docs.rs/tower-request-modifier/0.1.0")]
#![deny(missing_docs, missing_debug_implementations, unreachable_pub)]
#![cfg_attr(test, deny(warnings))]
//! A `tower::Service` middleware to modify the request.
use futures::Poll;
use http::header::{HeaderName, HeaderValue};
use http::uri::{self, Uri};
use http::{HttpTryFrom, Request};
use std::fmt;
use std::sync::Arc;
use tower_service::Service;
/// Wraps an HTTP service, applying a set of modifications to every request.
pub struct RequestModifier<T, B> {
inner: T,
modifiers: Arc<Vec<Box<dyn Fn(Request<B>) -> Request<B> + Send + Sync>>>,
}
impl<T, B> std::fmt::Debug for RequestModifier<T, B> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
writeln!(f, "RequestModifier with {} modifiers", self.modifiers.len())
}
}
/// Configure a `RequestModifier` instance
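///
/// A sketch of typical usage (`inner_service` is assumed to be some
/// `tower::Service` handling `http::Request`s):
///
/// ```ignore
/// let svc = Builder::new()
///     .set_origin("http://www.example.com")
///     .add_header("x-test-header", "some-value")
///     .build(inner_service)
///     .unwrap();
/// ```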
pub struct Builder<B> {
modifiers: Vec<Result<Box<dyn Fn(Request<B>) -> Request<B> + Send + Sync>, BuilderError>>,
}
impl<B> Default for Builder<B> {
fn default() -> Self {
Builder {
modifiers: Vec::default(),
}
}
}
/// Errors that can happen when building a `RequestModifier`.
#[derive(Debug)]
pub struct BuilderError {
_p: (),
}
// ===== impl RequestModifier ======
impl<T, B> RequestModifier<T, B> {
/// Create a new `RequestModifier`
pub fn new(
inner: T,
        modifiers: Arc<Vec<Box<dyn Fn(Request<B>) -> Request<B> + Send + Sync>>>,
) -> Self {
RequestModifier {
            inner,
            modifiers,
}
}
/// Returns a reference to the inner service.
pub fn get_ref(&self) -> &T {
&self.inner
}
/// Returns a mutable reference to the inner service.
pub fn get_mut(&mut self) -> &mut T {
&mut self.inner
}
/// Consumes `self`, returning the inner service.
pub fn | (self) -> T {
self.inner
}
}
impl<T, B> Service<Request<B>> for RequestModifier<T, B>
where
T: Service<Request<B>>,
{
type Response = T::Response;
type Error = T::Error;
type Future = T::Future;
fn poll_ready(&mut self) -> Poll<(), Self::Error> {
self.inner.poll_ready()
}
fn call(&mut self, mut req: Request<B>) -> Self::Future {
let mods = &self.modifiers;
for m in mods.iter() {
req = m(req);
}
// Call the inner service
self.inner.call(req)
}
}
impl<T, B> Clone for RequestModifier<T, B>
where
T: Clone,
{
fn clone(&self) -> Self {
RequestModifier {
inner: self.inner.clone(),
modifiers: self.modifiers.clone(),
}
}
}
// ===== impl Builder ======
impl<B> Builder<B> {
/// Return a new, default builder
pub fn new() -> Self {
Builder::default()
}
/// Build a Fn to add desired header
fn make_add_header(
name: HeaderName,
val: HeaderValue,
    ) -> Box<dyn Fn(Request<B>) -> Request<B> + Send + Sync> {
Box::new(move |mut req: Request<B>| {
req.headers_mut().append(name.clone(), val.clone());
req
})
}
/// Set a header on all requests.
pub fn add_header<T: ToString, R>(mut self, name: T, val: R) -> Self
where
HeaderName: HttpTryFrom<T>,
HeaderValue: HttpTryFrom<R>,
{
let name = HeaderName::try_from(name);
let val = HeaderValue::try_from(val);
let err = BuilderError { _p: () };
let modification = match (name, val) {
(Ok(name), Ok(val)) => Ok(Self::make_add_header(name, val)),
(_, _) => Err(err),
};
self.modifiers.push(modification);
self
}
/// Build a Fn to perform desired Request origin modification
fn make_set_origin(
scheme: uri::Scheme,
authority: uri::Authority,
    ) -> Box<dyn Fn(Request<B>) -> Request<B> + Send + Sync> {
Box::new(move |req: Request<B>| {
// Split the request into the head and the body.
let (mut head, body) = req.into_parts();
// Split the request URI into parts.
let mut uri: http::uri::Parts = head.uri.into();
// Update the URI parts, setting the scheme and authority
uri.authority = Some(authority.clone());
uri.scheme = Some(scheme.clone());
            // Update the request URI
head.uri = http::Uri::from_parts(uri).expect("valid uri");
Request::from_parts(head, body)
})
}
/// Set the URI to use as the origin for all requests.
pub fn set_origin<T>(mut self, uri: T) -> Self
where
Uri: HttpTryFrom<T>,
{
let modification = Uri::try_from(uri)
.map_err(|_| BuilderError { _p: () })
.and_then(|u| {
let parts = uri::Parts::from(u);
let scheme = parts.scheme.ok_or(BuilderError { _p: () })?;
let authority = parts.authority.ok_or(BuilderError { _p: () })?;
let check = match parts.path_and_query {
None => Ok(()),
Some(ref path) if path == "/" => Ok(()),
_ => Err(BuilderError { _p: () }),
};
check.and_then(|_| Ok(Self::make_set_origin(scheme, authority)))
});
self.modifiers.push(modification);
self
}
/// Run an arbitrary modifier on all requests
pub fn add_modifier(
mut self,
        modifier: Box<dyn Fn(Request<B>) -> Request<B> + Send + Sync>,
) -> Self {
self.modifiers.push(Ok(modifier));
self
}
/// Build the `RequestModifier` from the provided settings.
pub fn build<T>(self, inner: T) -> Result<RequestModifier<T, B>, BuilderError> {
let modifiers = self.modifiers.into_iter().collect::<Result<Vec<_>, _>>()?;
Ok(RequestModifier::new(inner, Arc::new(modifiers)))
}
}
impl<B> fmt::Debug for Builder<B> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "RequestModifierBuilder")
}
}
| into_inner |
gulpfile.js | "use strict";
// Load plugins
const autoprefixer = require("gulp-autoprefixer");
const browsersync = require("browser-sync").create();
const cleanCSS = require("gulp-clean-css");
const del = require("del");
const gulp = require("gulp");
const header = require("gulp-header");
const merge = require("merge-stream");
const plumber = require("gulp-plumber");
const rename = require("gulp-rename");
const sass = require("gulp-sass");
const uglify = require('gulp-uglify-es').default;
// Load package.json for banner
const pkg = require('./package.json');
// Set the banner content
const banner = ['/*!\n',
' * Start Bootstrap - <%= pkg.title %> v<%= pkg.version %> (<%= pkg.homepage %>)\n',
' * Copyright 2013-' + (new Date()).getFullYear(), ' <%= pkg.author %>\n',
' * Licensed under <%= pkg.license %> (https://github.com/StartBootstrap/<%= pkg.name %>/blob/master/LICENSE)\n',
' */\n',
'\n'
].join('');
// BrowserSync
function browserSync(done) {
browsersync.init({
server: {
baseDir: "./"
},
port: 3000
});
done();
}
// BrowserSync reload
function browserSyncReload(done) {
browsersync.reload();
done();
}
// Clean vendor
function clean() {
return del(["./vendor/"]);
}
// Bring third party dependencies from node_modules into vendor directory
function modules() {
// Bootstrap
var bootstrap = gulp.src('./node_modules/bootstrap/dist/**/*')
.pipe(gulp.dest('./vendor/bootstrap'));
// Font Awesome CSS
var fontAwesomeCSS = gulp.src('./node_modules/@fortawesome/fontawesome-free/css/**/*')
.pipe(gulp.dest('./vendor/fontawesome-free/css'));
// Font Awesome Webfonts
var fontAwesomeWebfonts = gulp.src('./node_modules/@fortawesome/fontawesome-free/webfonts/**/*')
.pipe(gulp.dest('./vendor/fontawesome-free/webfonts'));
// jQuery
var jquery = gulp.src([
'./node_modules/jquery/dist/*',
'!./node_modules/jquery/dist/core.js'
])
.pipe(gulp.dest('./vendor/jquery'));
return merge(bootstrap, fontAwesomeCSS, fontAwesomeWebfonts, jquery);
}
// CSS task
function css() {
return gulp
.src("./scss/**/*.scss")
.pipe(plumber())
.pipe(sass({
outputStyle: "expanded",
includePaths: "./node_modules",
}))
.on("error", sass.logError)
.pipe(autoprefixer({
cascade: false
}))
.pipe(header(banner, {
pkg: pkg
}))
.pipe(gulp.dest("./css"))
.pipe(rename({
suffix: ".min"
}))
.pipe(cleanCSS())
.pipe(gulp.dest("./css"))
.pipe(browsersync.stream());
}
// JS task
function js() {
return gulp
.src([
'./js/*.js',
'!./js/*.min.js'
])
.pipe(uglify())
.pipe(header(banner, {
pkg: pkg
}))
.pipe(rename({ | .pipe(browsersync.stream());
}
// Watch files
function watchFiles() {
gulp.watch("./scss/**/*", css);
gulp.watch(["./js/**/*", "!./js/**/*.min.js"], js);
gulp.watch("./**/*.html", browserSyncReload);
}
// Define complex tasks
const vendor = gulp.series(clean, modules);
const build = gulp.series(vendor, gulp.parallel(css, js));
const watch = gulp.series(build, gulp.parallel(watchFiles, browserSync));
// Export tasks
exports.css = css;
exports.js = js;
exports.clean = clean;
exports.vendor = vendor;
exports.build = build;
exports.watch = watch;
exports.default = build; | suffix: '.min'
}))
.pipe(gulp.dest('./js')) |
builder.go | // Copyright (C) 2019-2021, Ava Labs, Inc. All rights reserved.
// See the file LICENSE for licensing terms.
package vertex
import (
"github.com/ava-labs/avalanchego/ids"
"github.com/ava-labs/avalanchego/snow/consensus/avalanche"
"github.com/ava-labs/avalanchego/snow/consensus/snowstorm"
"github.com/ava-labs/avalanchego/utils/hashing"
)
// Builder builds a vertex given a set of parentIDs and transactions.
type Builder interface {
// Build a new vertex from the contents of a vertex
BuildVtx(parentIDs []ids.ID, txs []snowstorm.Tx) (avalanche.Vertex, error)
// Build a new stop vertex from the parents
BuildStopVtx(parentIDs []ids.ID) (avalanche.Vertex, error)
}
// Build a new stateless vertex from the contents of a vertex
func Build(
chainID ids.ID,
height uint64,
parentIDs []ids.ID,
txs [][]byte,
) (StatelessVertex, error) {
return buildVtx(
chainID,
height,
parentIDs,
txs,
func(vtx innerStatelessVertex) error {
return vtx.verify()
},
false,
)
}
// BuildStopVertex builds a new stateless stop vertex from the parentIDs
func BuildStopVertex(chainID ids.ID, height uint64, parentIDs []ids.ID) (StatelessVertex, error) {
return buildVtx(
chainID,
height,
parentIDs,
nil,
func(vtx innerStatelessVertex) error {
return vtx.verifyStopVertex()
},
true,
)
}
func | (
chainID ids.ID,
height uint64,
parentIDs []ids.ID,
txs [][]byte,
verifyFunc func(innerStatelessVertex) error,
stopVertex bool,
) (StatelessVertex, error) {
ids.SortIDs(parentIDs)
SortHashOf(txs)
codecVer := codecVersion
if stopVertex {
// use new codec version for the "StopVertex"
codecVer = codecVersionWithStopVtx
}
innerVtx := innerStatelessVertex{
Version: codecVer,
ChainID: chainID,
Height: height,
Epoch: 0,
ParentIDs: parentIDs,
Txs: txs,
}
if err := verifyFunc(innerVtx); err != nil {
return nil, err
}
vtxBytes, err := c.Marshal(innerVtx.Version, innerVtx)
vtx := statelessVertex{
innerStatelessVertex: innerVtx,
id: hashing.ComputeHash256Array(vtxBytes),
bytes: vtxBytes,
}
return vtx, err
}
| buildVtx |
profiler.py | # Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import paddle
# A global variable to record the number of calling times for profiler
# functions. It is used to specify the tracing range of training steps.
_profiler_step_id = 0
# A global variable to avoid parsing from string every time.
_profiler_options = None
class ProfilerOptions(object):
'''
Use a string to initialize a ProfilerOptions.
The string should be in the format: "key1=value1;key2=value;key3=value3".
For example:
"profile_path=model.profile"
"batch_range=[50, 60]; profile_path=model.profile"
"batch_range=[50, 60]; tracer_option=OpDetail; profile_path=model.profile"
ProfilerOptions supports following key-value pair:
        batch_range - an integer list, e.g. [100, 110].
state - a string, the optional values are 'CPU', 'GPU' or 'All'.
sorted_key - a string, the optional values are 'calls', 'total',
                     'max', 'min' or 'ave'.
tracer_option - a string, the optional values are 'Default', 'OpDetail',
'AllOpDetail'.
profile_path - a string, the path to save the serialized profile data,
which can be used to generate a timeline.
exit_on_finished - a boolean.
'''
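    # Illustrative example:
    #   opts = ProfilerOptions("batch_range=[50, 60]; profile_path=model.profile")
    #   opts['batch_range']   # -> [50, 60]
    #   opts['profile_path']  # -> 'model.profile'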
def __init__(self, options_str):
assert isinstance(options_str, str)
self._options = {
'batch_range': [10, 20],
'state': 'All',
'sorted_key': 'total',
'tracer_option': 'Default',
'profile_path': '/tmp/profile',
'exit_on_finished': True
}
self._parse_from_string(options_str)
def _parse_from_string(self, options_str):
for kv in options_str.replace(' ', '').split(';'):
key, value = kv.split('=')
if key == 'batch_range':
value_list = value.replace('[', '').replace(']', '').split(',')
value_list = list(map(int, value_list))
if len(value_list) >= 2 and value_list[0] >= 0 and value_list[
1] > value_list[0]:
self._options[key] = value_list
elif key == 'exit_on_finished':
self._options[key] = value.lower() in ("yes", "true", "t", "1")
elif key in [
'state', 'sorted_key', 'tracer_option', 'profile_path'
]:
self._options[key] = value
def __getitem__(self, name):
if self._options.get(name, None) is None:
raise ValueError( | def add_profiler_step(options_str=None):
'''
Enable the operator-level timing using PaddlePaddle's profiler.
    The profiler uses an independent variable to count the profiler steps.
One call of this function is treated as a profiler step.
Args:
        options_str - a string to initialize the ProfilerOptions.
                      Default is None, and the profiler is disabled.
'''
if options_str is None:
return
global _profiler_step_id
global _profiler_options
if _profiler_options is None:
_profiler_options = ProfilerOptions(options_str)
if _profiler_step_id == _profiler_options['batch_range'][0]:
paddle.utils.profiler.start_profiler(
_profiler_options['state'], _profiler_options['tracer_option'])
elif _profiler_step_id == _profiler_options['batch_range'][1]:
paddle.utils.profiler.stop_profiler(_profiler_options['sorted_key'],
_profiler_options['profile_path'])
if _profiler_options['exit_on_finished']:
sys.exit(0)
_profiler_step_id += 1 | "ProfilerOptions does not have an option named %s." % name)
return self._options[name]
|
CCInput.js | import React from 'react';
import {
StyleSheet,
Dimensions,
ScrollView,
Image,
ImageBackground,
Platform,
AsyncStorage,
} from 'react-native';
import { Block, Text, theme } from 'galio-framework';
import { Button, Select, Icon, Input, Header, Switch } from '../components';
//import { Button } from "../components";
import MapView from 'react-native-maps';
import openMap from 'react-native-open-maps';
import { Images, argonTheme } from '../constants';
import { HeaderHeight } from '../constants/utils';
import { Card, Rating } from 'react-native-elements';
const { width, height } = Dimensions.get('screen');
const thumbMeasure = (width - 48 - 32) / 3;
class | extends React.Component {
constructor(props) {
super(props);
this.state = {
region: null,
timing: '',
};
}
async componentWillMount() {
    // Read the selected place from the route params and set up timing info and the map region
const open = this.props.route.params.item;
    // Check whether the place actually provides opening hours
    const n = 'opening_hours' in open;
if (n) {
this.setState({
timing: this.props.route.params.item.opening_hours.open_now,
});
} else {
this.setState({ timing: 'Not Mentioned' });
}
const location = this.props.route.params.item.geometry.location;
let region = {
latitude: location.lat,
longitude: location.lng,
latitudeDelta: 0.045,
longitudeDelta: 0.045,
};
this.setState({ region: region });
}
render() {
//const item = this.props.route.params;
const item = this.props.route.params.item.name;
const item2 = this.props.route.params.item.vicinity;
const bs = this.props.route.params.item.business_status;
const location = this.props.route.params.item.geometry.location;
//const open=this.props.route.params.item.opening_hours.open_now;
const rating = this.props.route.params.item.user_ratings_total;
//const check =this.props.route.params.item.plus_code.compound_code;
//const item = this.props.navigation.getParam('item');
console.log('here' + this.state.timing);
const openAppMap = () => {
openMap({
latitude: location.lat,
longitude: location.lng,
zoom: 19,
});
};
return (
<Block flex style={styles.profile}>
<Block flex>
<ImageBackground
source={Images.ProfileBackground}
style={styles.profileContainer}
imageStyle={styles.profileBackground}
>
<Block style={{ marginBottom: theme.SIZES.BASE }}>
<Header
transparent
white
title='Profile'
navigation={this.props.navigation}
/>
</Block>
<ScrollView
showsVerticalScrollIndicator={false}
style={{ width, marginTop: '5%' }}
>
<Block flex style={styles.profileCard}>
<Block middle style={styles.avatarContainer}>
<Image
source={{
uri:
'https://image.flaticon.com/icons/png/512/1516/1516386.png',
}}
style={styles.avatar}
/>
</Block>
<Block style={styles.info}>
<Block row space='between'>
<Block middle>
<Text size={12} color={argonTheme.COLORS.TEXT}>
Timings
</Text>
<Text
bold
size={18}
color='#525F7F'
style={{ marginBottom: 4 }}
>
{this.state.timing ? 'Open' : 'Closed'}
</Text>
</Block>
{/* <Block middle>
<Text
bold
color="#525F7F"
size={18}
style={{ marginBottom: 4 }}
>
{open}
</Text>
<Text size={12} color={argonTheme.COLORS.TEXT}>Photos</Text>
</Block> */}
<Block middle>
<Text size={12} color={argonTheme.COLORS.TEXT}>
Customer Rating
</Text>
<Rating
type='star'
imageSize={15}
readonly
style={{ paddingRight: 10, marginTop: 7 }}
startingValue={rating}
/>
</Block>
</Block>
</Block>
<Block flex>
<Block middle style={styles.nameInfo}>
<Text bold size={28} color='#32325D'>
{item}
</Text>
<Text size={16} color='green' style={{ marginTop: 10 }}>
{bs}
</Text>
</Block>
<Block middle style={{ marginTop: 30, marginBottom: 16 }}>
<Block style={styles.divider} />
</Block>
<Block middle>
<Text
size={16}
color='#525F7F'
style={{ textAlign: 'center' }}
selectable
>
{item2}
</Text>
</Block>
<Block style={{ paddingBottom: -HeaderHeight * 2 }}>
{/* <Block row space="between" style={{ flexWrap: "wrap" }}>
{Photos.map((img, imgIndex) => (
<Image
source={{ uri: img }}
key={`viewed-${img}`}
resizeMode="cover"
style={styles.thumb}
/>
))}
</Block> */}
<MapView
style={{ height: height / 2, width: '100%' }}
initialRegion={this.state.region}
onPress={openAppMap}
>
<MapView.Marker
coordinate={{
latitude: location.lat,
longitude: location.lng,
}}
/>
</MapView>
</Block>
</Block>
</Block>
</ScrollView>
</ImageBackground>
</Block>
{/* <ScrollView showsVerticalScrollIndicator={false}
contentContainerStyle={{ flex: 1, width, height, zIndex: 9000, backgroundColor: 'red' }}>
<Block flex style={styles.profileCard}>
<Block middle style={styles.avatarContainer}>
<Image
source={{ uri: Images.ProfilePicture }}
style={styles.avatar}
/>
</Block>
<Block style={styles.info}>
<Block
middle
row
space="evenly"
style={{ marginTop: 20, paddingBottom: 24 }}
>
<Button small style={{ backgroundColor: argonTheme.COLORS.INFO }}>
CONNECT
</Button>
<Button
small
style={{ backgroundColor: argonTheme.COLORS.DEFAULT }}
>
MESSAGE
</Button>
</Block>
<Block row space="between">
<Block middle>
<Text
bold
size={12}
color="#525F7F"
style={{ marginBottom: 4 }}
>
2K
</Text>
<Text size={12}>Orders</Text>
</Block>
<Block middle>
<Text bold size={12} style={{ marginBottom: 4 }}>
10
</Text>
<Text size={12}>Photos</Text>
</Block>
<Block middle>
<Text bold size={12} style={{ marginBottom: 4 }}>
89
</Text>
<Text size={12}>Comments</Text>
</Block>
</Block>
</Block>
<Block flex>
<Block middle style={styles.nameInfo}>
<Text bold size={28} color="#32325D">
Jessica Jones, 27
</Text>
<Text size={16} color="#32325D" style={{ marginTop: 10 }}>
San Francisco, USA
</Text>
</Block>
<Block middle style={{ marginTop: 30, marginBottom: 16 }}>
<Block style={styles.divider} />
</Block>
<Block middle>
<Text size={16} color="#525F7F" style={{ textAlign: "center" }}>
An artist of considerable range, Jessica name taken by
Melbourne …
</Text>
<Button
color="transparent"
textStyle={{
color: "#233DD2",
fontWeight: "500",
fontSize: 16
}}
>
Show more
</Button>
</Block>
<Block
row
style={{ paddingVertical: 14, alignItems: "baseline" }}
>
<Text bold size={16} color="#525F7F">
Album
</Text>
</Block>
<Block
row
style={{ paddingBottom: 20, justifyContent: "flex-end" }}
>
<Button
small
color="transparent"
textStyle={{ color: "#5E72E4", fontSize: 12 }}
>
View all
</Button>
</Block>
<Block style={{ paddingBottom: -HeaderHeight * 2 }}>
<Block row space="between" style={{ flexWrap: "wrap" }}>
{Images.Viewed.map((img, imgIndex) => (
<Image
source={{ uri: img }}
key={`viewed-${img}`}
resizeMode="cover"
style={styles.thumb}
/>
))}
</Block>
</Block>
</Block>
</Block>
</ScrollView>*/}
</Block>
);
}
}
const styles = StyleSheet.create({
profile: {
marginTop: Platform.OS === 'android' ? -HeaderHeight : 0,
// marginBottom: -HeaderHeight * 2,
flex: 1,
},
profileContainer: {
width: width,
height: height,
padding: 0,
zIndex: 1,
// marginTop:100,
},
profileBackground: {
width: width,
height: height / 2,
},
profileCard: {
// position: "relative",
padding: theme.SIZES.BASE,
marginHorizontal: theme.SIZES.BASE,
marginTop: 100,
borderTopLeftRadius: 6,
borderTopRightRadius: 6,
backgroundColor: theme.COLORS.WHITE,
shadowColor: 'black',
shadowOffset: { width: 0, height: 0 },
shadowRadius: 8,
shadowOpacity: 0.2,
zIndex: 2,
},
info: {
paddingHorizontal: 40,
},
avatarContainer: {
position: 'relative',
marginTop: -80,
},
avatar: {
width: 180,
height: 180,
borderRadius: 62,
borderWidth: 0,
},
nameInfo: {
marginTop: 35,
},
divider: {
width: '90%',
borderWidth: 1,
borderColor: '#E9ECEF',
},
thumb: {
borderRadius: 4,
marginVertical: 4,
alignSelf: 'center',
width: thumbMeasure,
height: thumbMeasure,
},
});
export default CCInput;
| CCInput |
urls.py | from django.urls import path
from mysit.views import *
app_name = 'mysit'
urlpatterns = [
path('', index_views, name='index'),
path('about', about_views, name='about'),
path('contact', contact_views, name='contact'),
path('gallery', gallery_views, name='gallery'),
path('menu', menu_views, name='menu'),
path('reservation', reservation_views, name='reservation'), | ]
|
defaults_test.go | package defaults
import (
"testing"
"github.com/stretchr/testify/assert"
)
func Test_String(t *testing.T) {
a := assert.New(t)
a.Equal(String("", "foo"), "foo")
a.Equal(String("bar", "foo"), "bar")
var s string
a.Equal(String(s, "foo"), "foo")
}
func Test_Int(t *testing.T) {
a := assert.New(t)
a.Equal(Int(0, 1), 1)
a.Equal(Int(2, 1), 2)
var s int
a.Equal(Int(s, 1), 1)
}
func Test_Int64(t *testing.T) {
a := assert.New(t)
a.Equal(Int64(0, 1), int64(1))
a.Equal(Int64(2, 1), int64(2))
var s int64
a.Equal(Int64(s, 1), int64(1))
}
func Test_Float32(t *testing.T) {
a := assert.New(t)
a.Equal(Float32(0, 1), float32(1))
a.Equal(Float32(2, 1), float32(2))
var s float32
a.Equal(Float32(s, 1), float32(1))
}
func | (t *testing.T) {
a := assert.New(t)
a.Equal(Float64(0, 1), float64(1))
a.Equal(Float64(2, 1), float64(2))
var s float64
a.Equal(Float64(s, 1), float64(1))
}
| Test_Float64 |
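// The helpers under test live outside this file; a minimal sketch consistent
// with the assertions above might look like the following (illustrative only —
// the real package may cover more types or be generated):
//
//	func String(s, def string) string {
//		if s == "" {
//			return def
//		}
//		return s
//	}
//
//	func Int(i, def int) int {
//		if i == 0 {
//			return def
//		}
//		return i
//	}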
32.js | (function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory(require('@carbon/icon-helpers'), require('prop-types'), require('react')) :
typeof define === 'function' && define.amd ? define(['@carbon/icon-helpers', 'prop-types', 'react'], factory) :
(global.Report32 = factory(global.CarbonIconHelpers,global.PropTypes,global.React));
}(this, (function (iconHelpers,PropTypes,React) { 'use strict';
PropTypes = PropTypes && PropTypes.hasOwnProperty('default') ? PropTypes['default'] : PropTypes;
React = React && React.hasOwnProperty('default') ? React['default'] : React;
function | (obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; var ownKeys = Object.keys(source); if (typeof Object.getOwnPropertySymbols === 'function') { ownKeys = ownKeys.concat(Object.getOwnPropertySymbols(source).filter(function (sym) { return Object.getOwnPropertyDescriptor(source, sym).enumerable; })); } ownKeys.forEach(function (key) { _defineProperty(target, key, source[key]); }); } return target; }
function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
function _objectWithoutProperties(source, excluded) { if (source == null) return {}; var target = _objectWithoutPropertiesLoose(source, excluded); var key, i; if (Object.getOwnPropertySymbols) { var sourceSymbolKeys = Object.getOwnPropertySymbols(source); for (i = 0; i < sourceSymbolKeys.length; i++) { key = sourceSymbolKeys[i]; if (excluded.indexOf(key) >= 0) continue; if (!Object.prototype.propertyIsEnumerable.call(source, key)) continue; target[key] = source[key]; } } return target; }
function _objectWithoutPropertiesLoose(source, excluded) { if (source == null) return {}; var target = {}; var sourceKeys = Object.keys(source); var key, i; for (i = 0; i < sourceKeys.length; i++) { key = sourceKeys[i]; if (excluded.indexOf(key) >= 0) continue; target[key] = source[key]; } return target; }
var defaultStyle = {
"willChange": "transform"
};
var Report32 = React.forwardRef(function (_ref, ref) {
var className = _ref.className,
children = _ref.children,
style = _ref.style,
tabIndex = _ref.tabIndex,
rest = _objectWithoutProperties(_ref, ["className", "children", "style", "tabIndex"]);
var _getAttributes = iconHelpers.getAttributes(_objectSpread({}, rest, {
tabindex: tabIndex
})),
tabindex = _getAttributes.tabindex,
props = _objectWithoutProperties(_getAttributes, ["tabindex"]);
if (className) {
props.className = className;
}
if (tabindex !== undefined && tabindex !== null) {
props.tabIndex = tabindex;
}
if (_typeof(style) === 'object') {
props.style = _objectSpread({}, defaultStyle, style);
} else {
props.style = defaultStyle;
}
if (ref) {
props.ref = ref;
}
return React.createElement('svg', props, children, React.createElement('path', {
d: 'M10 18h8v2h-8zm0-5h12v2H10zm0 10h5v2h-5z'
}), React.createElement('path', {
d: 'M25 5h-3V4a2 2 0 0 0-2-2h-8a2 2 0 0 0-2 2v1H7a2 2 0 0 0-2 2v21a2 2 0 0 0 2 2h18a2 2 0 0 0 2-2V7a2 2 0 0 0-2-2zM12 4h8v4h-8zm13 24H7V7h3v3h12V7h3z'
}));
});
Report32.displayName = 'Report32';
Report32.propTypes = {
'aria-hidden': PropTypes.bool,
'aria-label': PropTypes.string,
'aria-labelledby': PropTypes.string,
className: PropTypes.string,
children: PropTypes.node,
height: PropTypes.number,
preserveAspectRatio: PropTypes.string,
tabIndex: PropTypes.string,
viewBox: PropTypes.string,
width: PropTypes.number,
xmlns: PropTypes.string
};
Report32.defaultProps = {
width: 32,
height: 32,
viewBox: '0 0 32 32',
xmlns: 'http://www.w3.org/2000/svg',
preserveAspectRatio: 'xMidYMid meet'
};
return Report32;
})));
| _typeof |
download_file.py | from st_library import Library |
st_lib = Library()
st_lib.set_token('token')
st_lib.set_config_id('52db99d3-edfb-44c5-b97a-f09df4402081')
print(st_lib.unstruct_data.download_file("19a29b9b-bea2-40fb-89c4-555bba829539","image.jpg")) | |
issue-78701.rs | #![crate_name = "foo"]
// This test ensures that if a blanket impl has the same ID as another impl, it'll
// link to the blanket impl and not the other impl. Basically, we're checking if
// the ID is correctly derived.
// @has 'foo/struct.AnotherStruct.html'
// @count - '//*[@class="sidebar"]//a[@href="#impl-AnAmazingTrait"]' 1
// @count - '//*[@class="sidebar"]//a[@href="#impl-AnAmazingTrait-1"]' 1
pub trait Something {}
pub trait AnAmazingTrait {}
impl<T: Something> AnAmazingTrait for T {}
pub struct | <T>(T);
impl<T: Something> Something for AnotherStruct<T> {}
impl AnAmazingTrait for AnotherStruct<()> {}
| AnotherStruct |
main.go | package main
import (
"encoding/json"
"log"
"os"
"terraform-resource/check"
"terraform-resource/encoder"
"terraform-resource/models"
)
func main() {
req := models.InRequest{}
if err := json.NewDecoder(os.Stdin).Decode(&req); err != nil {
log.Fatalf("Failed to read InRequest: %s", err)
}
cmd := check.Runner{
LogWriter: os.Stderr,
}
resp, err := cmd.Run(req)
if err != nil {
log.Fatal(err)
}
| }
} | if err := encoder.NewJSONEncoder(os.Stdout).Encode(resp); err != nil {
log.Fatalf("Failed to write Versions to stdout: %s", err) |
catmull_rom_spline.rs | use cubic_spline::spline;
use get_input_index;
use primitive::InterpolationPrimitive;
/// Catmull-Rom spline interpolation
///
/// `f(t) = (2d^3 + 3d^2 + 1)p0 + (d^3 - 2d^2 + d)m0 + (-2d^3 + 3d^2)p1 + (d^3 - d^2)m1`
/// `d = (t - t0) / (t1 - t0)`
/// `p0 = position at left keyframe`
/// `p1 = position at right keyframe`
/// `k = left keyframe index`
/// `k+1 = right keyframe index`
/// `m0 = (p_k+1 - p_k-1) / (t_k+1 - t_k-1)`
/// `m1 = (p_k+2 - p_k) / (t_k+2 - t_k)`
/// `t0 = input at left keyframe`
/// `t1 = input at right keyframe`
///
/// ## Parameters:
///
/// - `input`: the input value to the function
/// - `inputs`: list of discrete input values for each keyframe
/// - `outputs`: list of output values to interpolate between, for catmull rom spline interpolation
/// this should be the size of `inputs` + 2
/// `[ in_tangent_0, position_0, position_1, .., position_n, out_tangent_n ]`
/// - `normalize`: if true, normalize the interpolated value before returning it
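///
/// ## Example
///
/// A minimal sketch of a call (it mirrors `test_catmull_arr3` below and assumes
/// this crate's `InterpolationPrimitive` impl for `[f32; 3]`):
///
/// ```rust,ignore
/// let inputs = vec![0., 1., 2., 3., 4.];
/// // [ in_tangent_0, position_0, .., position_4, out_tangent_4 ]: inputs.len() + 2 entries
/// let outputs = vec![
///     [1., 0., 0.],
///     [0., 0., 0.],
///     [1., 0., 0.],
///     [0., 0., 0.],
///     [-1., 0., 0.],
///     [0., 0., 0.],
///     [-1., 0., 0.],
/// ];
/// assert_eq!(
///     [0.625, 0., 0.],
///     catmull_rom_spline_interpolate(0.5, &inputs, &outputs, false)
/// );
/// ```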
pub fn catmull_rom_spline_interpolate<T>(
input: f32,
inputs: &[f32],
outputs: &[T],
normalize: bool,
) -> T
where
T: InterpolationPrimitive + Clone,
{
let input_index = match get_input_index(input, inputs) {
Some(index) => index,
None => return outputs[1].clone(),
};
if input_index >= (inputs.len() - 1) {
outputs[outputs.len() - 2].clone()
} else {
let t_diff = inputs[input_index + 1] - inputs[input_index];
let v = spline(
input,
inputs[input_index],
t_diff,
&outputs[input_index + 1],
&outputs[input_index + 2],
&catmull_tangent(input_index, inputs, outputs),
&catmull_tangent(input_index + 1, inputs, outputs),
);
if normalize {
v.normalize()
} else {
v
}
}
}
fn catmull_tangent<D>(index: usize, inputs: &[f32], outputs: &[D]) -> D
where
D: InterpolationPrimitive + Clone,
{
let output_index = index + 1;
if index == 0 {
outputs[0].clone()
} else if index == inputs.len() - 1 {
outputs[outputs.len() - 1].clone()
} else {
outputs[output_index + 1]
.sub(&outputs[output_index - 1])
.mul(1. / (inputs[index + 1] - inputs[index - 1]))
}
}
#[cfg(test)]
mod tests {
use super::*;
use mint::{Quaternion, Vector3};
#[test]
fn test_catmull_arr3() {
let input = vec![0., 1., 2., 3., 4.];
let output = vec![
[1., 0., 0.],
[0., 0., 0.],
[1., 0., 0.],
[0., 0., 0.],
[-1., 0., 0.],
[0., 0., 0.],
[-1., 0., 0.],
];
assert_eq!(
[0.625, 0., 0.],
catmull_rom_spline_interpolate(0.5, &input, &output, false)
);
}
#[test]
fn | () {
let input = vec![0., 1., 2., 3., 4.];
let output = vec![
[1., 0., 0., 0.],
[0., 0., 0., 0.],
[1., 0., 0., 0.],
[0., 0., 0., 0.],
[-1., 0., 0., 0.],
[0., 0., 0., 0.],
[-1., 0., 0., 0.],
];
assert_eq!(
[1., 0., 0., 0.],
catmull_rom_spline_interpolate(0.5, &input, &output, true)
);
}
#[test]
fn test_catmull_vec3() {
let input = vec![0., 1., 2., 3., 4.];
let output = vec![
Vector3::from([1., 0., 0.]),
Vector3::from([0., 0., 0.]),
Vector3::from([1., 0., 0.]),
Vector3::from([0., 0., 0.]),
Vector3::from([-1., 0., 0.]),
Vector3::from([0., 0., 0.]),
Vector3::from([-1., 0., 0.]),
];
assert_eq!(
Vector3::from([0.625, 0., 0.]),
catmull_rom_spline_interpolate(0.5, &input, &output, false)
);
}
#[test]
fn test_catmull_quat() {
let input = vec![0., 1., 2., 3., 4.];
let output = vec![
Quaternion::from([1., 0., 0., 0.]),
Quaternion::from([0., 0., 0., 0.]),
Quaternion::from([1., 0., 0., 0.]),
Quaternion::from([0., 0., 0., 0.]),
Quaternion::from([-1., 0., 0., 0.]),
Quaternion::from([0., 0., 0., 0.]),
Quaternion::from([-1., 0., 0., 0.]),
];
assert_eq!(
Quaternion::from([1., 0., 0., 0.]),
catmull_rom_spline_interpolate(0.5, &input, &output, true)
);
}
}
| test_catmull_arr4 |
conf.py | #!/usr/bin/env python3
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
import shlex
import subprocess
sys.path.insert(0, os.path.abspath('.'))
import sphinx_rtd_theme
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
import sphinx_markdown_tables
import recommonmark
from recommonmark.transform import AutoStructify
from recommonmark.parser import CommonMarkParser
# source_parsers = {
# '.md': CommonMarkParser,
# }
source_suffix = ['.rst', '.md']
# -- Project information -----------------------------------------------------
project = 'Kyuubi'
copyright = '''
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
author = 'Kent Yao'
# The full version, including alpha/beta/rc tags
release = subprocess.getoutput("cd .. && build/mvn help:evaluate -Dexpression=project.version|grep -v Using|grep -v INFO|grep -v WARNING|tail -n 1").split('\n')[-1]
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.napoleon',
'sphinx.ext.mathjax',
'recommonmark',
'sphinx_markdown_tables',
'notfound.extension',
]
master_doc = 'index'
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
# html_theme_options = {
# 'logo_only': True
# }
html_logo = 'imgs/kyuubi_logo_gray.png'
pygments_style = 'sphinx'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
htmlhelp_basename = 'Recommonmarkdoc'
github_doc_root = 'https://github.com/apache/incubator-kyuubi/tree/master/docs/'
def | (app):
app.add_config_value('recommonmark_config', {
'url_resolver': lambda url: github_doc_root + url,
'auto_toc_tree_section': 'Contents',
'enable_eval_rst': True,
}, True)
app.add_transform(AutoStructify)
| setup |
trendy.py | import numpy as np
from filter import movingaverage
def gentrends(x, window=1/3.0, charts=True):
|
def segtrends(x, segments=2, charts=True, window=7):
"""
Turn minitrends to iterative process more easily adaptable to
implementation in simple trading systems; allows backtesting functionality.
:param x: One-dimensional data set
:param window: How long the trendlines should be. If window < 1, then it
will be taken as a percentage of the size of the data
:param charts: Boolean value saying whether to print chart to screen
"""
import numpy as np
y = np.array(x)
n=len(y)
movy = movingaverage(y, window)
# Implement trendlines and Find the indexes of these maxima in the data
segments = int(segments)
maxima = np.ones(segments)
minima = np.ones(segments)
x_maxima = np.ones(segments)
x_minima = np.ones(segments)
segsize = int(len(y)/segments)
for i in range(1, segments+1):
ind2 = i*segsize
ind1 = ind2 - segsize
seg = y[ind1:ind2]
maxima[i-1] = max(seg)
minima[i-1] = min(seg)
x_maxima[i-1] = ind1 + (np.where(seg == maxima[i-1])[0][0])
x_minima[i-1] = ind1 + (np.where(seg == minima[i-1])[0][0])
if charts:
import matplotlib.pyplot as plt
plt.plot(y)
plt.grid(True)
for i in range(0, segments-1):
maxslope = (maxima[i+1] - maxima[i]) / (x_maxima[i+1] - x_maxima[i])
a_max = maxima[i] - (maxslope * x_maxima[i])
b_max = maxima[i] + (maxslope * (len(y) - x_maxima[i]))
maxline = np.linspace(a_max, b_max, len(y))
minslope = (minima[i+1] - minima[i]) / (x_minima[i+1] - x_minima[i])
a_min = minima[i] - (minslope * x_minima[i])
b_min = minima[i] + (minslope * (len(y) - x_minima[i]))
minline = np.linspace(a_min, b_min, len(y))
if charts:
#plt.plot(maxline, 'g')
#plt.plot(minline, 'r')
pass
if charts:
plt.plot(range(n), movy, 'b')
plt.plot(x_maxima, maxima, 'g')
plt.plot(x_minima, minima, 'r')
plt.show()
# OUTPUT
return x_maxima, maxima, x_minima, minima
def minitrends(x, window=20, charts=True):
"""
Turn minitrends to iterative process more easily adaptable to
implementation in simple trading systems; allows backtesting functionality.
:param x: One-dimensional data set
:param window: How long the trendlines should be. If window < 1, then it
will be taken as a percentage of the size of the data
:param charts: Boolean value saying whether to print chart to screen
"""
import numpy as np
y = np.array(x)
if window < 1: # if window is given as fraction of data length
window = float(window)
window = int(window * len(y))
x = np.arange(0, len(y))
dy = y[window:] - y[:-window]
crit = dy[:-1] * dy[1:] < 0
# Find whether max's or min's
maxi = (y[x[crit]] - y[x[crit] + window] > 0) & \
(y[x[crit]] - y[x[crit] - window] > 0) * 1
mini = (y[x[crit]] - y[x[crit] + window] < 0) & \
(y[x[crit]] - y[x[crit] - window] < 0) * 1
maxi = maxi.astype(float)
mini = mini.astype(float)
maxi[maxi == 0] = np.nan
mini[mini == 0] = np.nan
xmax = x[crit] * maxi
xmax = xmax[~np.isnan(xmax)]
xmax = xmax.astype(int)
xmin = x[crit] * mini
xmin = xmin[~np.isnan(xmin)]
xmin = xmin.astype(int)
# See if better max or min in region
yMax = np.array([])
xMax = np.array([])
for i in xmax:
indx = np.where(xmax == i)[0][0] + 1
try:
Y = y[i:xmax[indx]]
yMax = np.append(yMax, Y.max())
xMax = np.append(xMax, np.where(y == yMax[-1])[0][0])
except IndexError:
# past the last maximum; nothing further to compare
pass
yMin = np.array([])
xMin = np.array([])
for i in xmin:
indx = np.where(xmin == i)[0][0] + 1
try:
Y = y[i:xmin[indx]]
yMin = np.append(yMin, Y.min())
xMin = np.append(xMin, np.where(y == yMin[-1])[0][0])
except IndexError:
# past the last minimum; nothing further to compare
pass
if y[-1] > yMax[-1]:
yMax = np.append(yMax, y[-1])
xMax = np.append(xMax, x[-1])
if y[0] not in yMax:
yMax = np.insert(yMax, 0, y[0])
xMax = np.insert(xMax, 0, x[0])
if y[-1] < yMin[-1]:
yMin = np.append(yMin, y[-1])
xMin = np.append(xMin, x[-1])
if y[0] not in yMin:
yMin = np.insert(yMin, 0, y[0])
xMin = np.insert(xMin, 0, x[0])
# Plot results if desired
if charts is True:
from matplotlib.pyplot import plot, show, grid
plot(x, y)
plot(xMax, yMax, '-o')
plot(xMin, yMin, '-o')
grid(True)
show()
# Return arrays of critical points
return xMax, yMax, xMin, yMin
def iterlines(x, window=30, charts=True):
"""
Turn minitrends to iterative process more easily adaptable to
implementation in simple trading systems; allows backtesting functionality.
:param x: One-dimensional data set
:param window: How long the trendlines should be. If window < 1, then it
will be taken as a percentage of the size of the data
:param charts: Boolean value saying whether to print chart to screen
"""
import numpy as np
x = np.array(x)
n = len(x)
if window < 1:
window = int(window * n)
sigs = np.zeros(n, dtype=float)
i = window
while i != n:
if x[i] > max(x[i-window:i]): sigs[i] = 1
elif x[i] < min(x[i-window:i]): sigs[i] = -1
i += 1
xmin = np.where(sigs == -1.0)[0]
xmax = np.where(sigs == 1.0)[0]
ymin = x[xmin]
ymax = x[xmax]
if charts is True:
from matplotlib.pyplot import plot, grid, show
plot(x)
plot(xmin, ymin, 'ro')
plot(xmax, ymax, 'go')
grid(True)
show()
return sigs
| """
Returns a Pandas dataframe with support and resistance lines.
:param x: One-dimensional data set
:param window: How long the trendlines should be. If window < 1, then it
will be taken as a percentage of the size of the data
:param charts: Boolean value saying whether to print chart to screen
"""
import numpy as np
import pandas as pd  # pd.DataFrame lives in pandas itself, not pandas.io.data
x = np.array(x)
if window < 1:
window = int(window * len(x))
max1 = np.where(x == max(x))[0][0] # find the index of the abs max
min1 = np.where(x == min(x))[0][0] # find the index of the abs min
# First the max
if max1 + window > len(x):
max2 = max(x[0:(max1 - window)])
else:
max2 = max(x[(max1 + window):])
# Now the min
if min1 - window < 0:
min2 = min(x[(min1 + window):])
else:
min2 = min(x[0:(min1 - window)])
# Now find the indices of the secondary extrema
max2 = np.where(x == max2)[0][0] # find the index of the 2nd max
min2 = np.where(x == min2)[0][0] # find the index of the 2nd min
# Create & extend the lines
maxslope = (x[max1] - x[max2]) / (max1 - max2) # slope between max points
minslope = (x[min1] - x[min2]) / (min1 - min2) # slope between min points
a_max = x[max1] - (maxslope * max1) # y-intercept for max trendline
a_min = x[min1] - (minslope * min1) # y-intercept for min trendline
b_max = x[max1] + (maxslope * (len(x) - max1)) # extend to last data pt
b_min = x[min1] + (minslope * (len(x) - min1)) # extend to last data point
maxline = np.linspace(a_max, b_max, len(x)) # Y values between max's
minline = np.linspace(a_min, b_min, len(x)) # Y values between min's
# OUTPUT
trends = np.transpose(np.array((x, maxline, minline)))
trends = pd.DataFrame(trends, index=np.arange(0, len(x)),
columns=['Data', 'Max Line', 'Min Line'])
if charts is True:
from matplotlib.pyplot import plot, grid, show, figure
figure()
plot(trends)
grid()
show()
return trends, maxslope, minslope |
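# A minimal usage sketch (assumes this module is importable as `trendy` and
# that its `filter.movingaverage` dependency is on the path; the synthetic
# series below is illustrative only):
#
#     import numpy as np
#     from trendy import gentrends, segtrends
#
#     prices = 100 + np.cumsum(np.random.randn(500))  # synthetic price series
#     trends, maxslope, minslope = gentrends(prices, window=1 / 3.0, charts=False)
#     x_max, maxima, x_min, minima = segtrends(prices, segments=4, charts=False)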
model_zoo.py | import torch
import hashlib
import os
import re
import shutil
import sys
import tempfile
if sys.version_info[0] == 2:
from urlparse import urlparse
from urllib2 import urlopen
else:
from urllib.request import urlopen
from urllib.parse import urlparse
try:
from tqdm import tqdm
except ImportError:
tqdm = None # defined below
# matches bfd8deac from resnet18-bfd8deac.pth
HASH_REGEX = re.compile(r'-([a-f0-9]*)\.')
def load_url(url, model_dir=None, map_location=None):
r"""Loads the Torch serialized object at the given URL.
If the object is already present in `model_dir`, it's deserialized and
returned. The filename part of the URL should follow the naming convention
``filename-<sha256>.ext`` where ``<sha256>`` is the first eight or more
digits of the SHA256 hash of the contents of the file. The hash is used to
ensure unique names and to verify the contents of the file.
The default value of `model_dir` is ``$TORCH_HOME/models`` where
``$TORCH_HOME`` defaults to ``~/.torch``. The default directory can be
overridden with the ``$TORCH_MODEL_ZOO`` environment variable.
Args:
url (string): URL of the object to download
model_dir (string, optional): directory in which to save the object
map_location (optional): a function or a dict specifying how to remap storage locations (see torch.load)
Example:
>>> state_dict = torch.utils.model_zoo.load_url('https://s3.amazonaws.com/pytorch/models/resnet18-5c106cde.pth')
"""
if model_dir is None:
torch_home = os.path.expanduser(os.getenv('TORCH_HOME', '~/.torch'))
model_dir = os.getenv('TORCH_MODEL_ZOO', os.path.join(torch_home, 'models'))
if not os.path.exists(model_dir):
os.makedirs(model_dir)
parts = urlparse(url)
filename = os.path.basename(parts.path)
cached_file = os.path.join(model_dir, filename)
if not os.path.exists(cached_file):
sys.stderr.write('Downloading: "{}" to {}\n'.format(url, cached_file))
hash_prefix = HASH_REGEX.search(filename).group(1)
_download_url_to_file(url, cached_file, hash_prefix)
return torch.load(cached_file, map_location=map_location)
def | (url, dst, hash_prefix):
u = urlopen(url)
meta = u.info()
if hasattr(meta, 'getheaders'):
file_size = int(meta.getheaders("Content-Length")[0])
else:
file_size = int(meta.get_all("Content-Length")[0])
f = tempfile.NamedTemporaryFile(delete=False)
try:
sha256 = hashlib.sha256()
with tqdm(total=file_size) as pbar:
while True:
buffer = u.read(8192)
if len(buffer) == 0:
break
f.write(buffer)
sha256.update(buffer)
pbar.update(len(buffer))
f.close()
digest = sha256.hexdigest()
if digest[:len(hash_prefix)] != hash_prefix:
raise RuntimeError('invalid hash value (expected "{}", got "{}")'
.format(hash_prefix, digest))
shutil.move(f.name, dst)
finally:
f.close()
if os.path.exists(f.name):
os.remove(f.name)
if tqdm is None:
# fake tqdm if it's not installed
class tqdm(object):
def __init__(self, total):
self.total = total
self.n = 0
def update(self, n):
self.n += n
sys.stderr.write("\r{0:.1f}%".format(100 * self.n / float(self.total)))
sys.stderr.flush()
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
sys.stderr.write('\n')
| _download_url_to_file |
basicauth.go | package basicauthrequest
import (
"encoding/base64"
"errors"
"net/http"
"strings"
"github.com/golang/glog"
"k8s.io/apiserver/pkg/authentication/authenticator"
"k8s.io/apiserver/pkg/authentication/user"
"github.com/openshift/origin/pkg/auth/prometheus"
)
type basicAuthRequestHandler struct {
provider string
passwordAuthenticator authenticator.Password
removeHeader bool
}
func | (provider string, passwordAuthenticator authenticator.Password, removeHeader bool) authenticator.Request {
return &basicAuthRequestHandler{provider, passwordAuthenticator, removeHeader}
}
func (authHandler *basicAuthRequestHandler) AuthenticateRequest(req *http.Request) (user.Info, bool, error) {
username, password, hasBasicAuth, err := getBasicAuthInfo(req)
if err != nil {
return nil, false, err
}
if !hasBasicAuth {
return nil, false, nil
}
var result string = metrics.SuccessResult
defer func() {
metrics.RecordBasicPasswordAuth(result)
}()
user, ok, err := authHandler.passwordAuthenticator.AuthenticatePassword(username, password)
if ok && authHandler.removeHeader {
req.Header.Del("Authorization")
}
switch {
case err != nil:
glog.Errorf(`Error authenticating login %q with provider %q: %v`, username, authHandler.provider, err)
result = metrics.ErrorResult
case !ok:
glog.V(4).Infof(`Login with provider %q failed for login %q`, authHandler.provider, username)
result = metrics.FailResult
case ok:
glog.V(4).Infof(`Login with provider %q succeeded for login %q: %#v`, authHandler.provider, username, user)
}
return user, ok, err
}
// getBasicAuthInfo returns the username and password in the request's basic-auth Authorization header,
// a boolean indicating whether the request had a valid basic-auth header, and any error encountered
// attempting to extract the basic-auth data.
func getBasicAuthInfo(r *http.Request) (string, string, bool, error) {
// Retrieve the Authorization header and check whether it contains basic auth information
const basicScheme string = "Basic "
auth := r.Header.Get("Authorization")
if !strings.HasPrefix(auth, basicScheme) {
return "", "", false, nil
}
str, err := base64.StdEncoding.DecodeString(auth[len(basicScheme):])
if err != nil {
return "", "", false, errors.New("No valid base64 data in basic auth scheme found")
}
cred := strings.SplitN(string(str), ":", 2)
if len(cred) < 2 {
return "", "", false, errors.New("Invalid Authorization header")
}
return cred[0], cred[1], true, nil
}
| NewBasicAuthAuthentication |
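// A minimal sketch of exercising getBasicAuthInfo with the standard library
// (the credentials below are illustrative):
//
//	req, _ := http.NewRequest("GET", "http://example.com", nil)
//	token := base64.StdEncoding.EncodeToString([]byte("user:secret"))
//	req.Header.Set("Authorization", "Basic "+token)
//	username, password, ok, err := getBasicAuthInfo(req)
//	// username == "user", password == "secret", ok == true, err == nil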
active_plan.rs | use std::mem;
use libc::c_void;
use mmtk::vm::ActivePlan;
use mmtk::{Plan, SelectedPlan};
use mmtk::util::{Address, SynchronizedCounter};
use mmtk::util::OpaquePointer;
use entrypoint::*;
use collection::VMCollection;
use JTOC_BASE;
use JikesRVM;
use SINGLETON;
use mmtk::scheduler::*;
static MUTATOR_COUNTER: SynchronizedCounter = SynchronizedCounter::new(0);
#[derive(Default)]
pub struct VMActivePlan<> {}
impl ActivePlan<JikesRVM> for VMActivePlan {
fn worker(tls: OpaquePointer) -> &'static mut GCWorker<JikesRVM> {
let thread: Address = unsafe { mem::transmute(tls) };
let system_thread = unsafe { (thread + SYSTEM_THREAD_FIELD_OFFSET).load::<Address>() };
let cc = unsafe {
&mut *((system_thread + WORKER_INSTANCE_FIELD_OFFSET)
.load::<*mut GCWorker<JikesRVM>>())
};
cc
}
fn number_of_mutators() -> usize {
unsafe {
(JTOC_BASE + NUM_THREADS_FIELD_OFFSET).load::<usize>()
}
}
fn global() -> &'static SelectedPlan<JikesRVM> {
&SINGLETON.plan
}
unsafe fn is_mutator(tls: OpaquePointer) -> bool {
let thread: Address = unsafe { mem::transmute(tls) };
!(thread + IS_COLLECTOR_FIELD_OFFSET).load::<bool>()
}
// XXX: Are they actually static
unsafe fn mutator(tls: OpaquePointer) -> &'static mut <SelectedPlan<JikesRVM> as Plan>::Mutator {
let thread: Address = unsafe { mem::transmute(tls) };
let mutator = (thread + MMTK_HANDLE_FIELD_OFFSET).load::<usize>();
&mut *(mutator as *mut <SelectedPlan<JikesRVM> as Plan>::Mutator)
}
fn collector_count() -> usize {
unimplemented!()
}
fn reset_mutator_iterator() {
MUTATOR_COUNTER.reset();
}
fn get_next_mutator() -> Option<&'static mut <SelectedPlan<JikesRVM> as Plan>::Mutator> {
loop {
let idx = MUTATOR_COUNTER.increment();
let num_threads = unsafe { (JTOC_BASE + NUM_THREADS_FIELD_OFFSET).load::<usize>() };
if idx >= num_threads {
return None;
} else |
}
}
} | {
let t = unsafe { VMCollection::thread_from_index(idx) };
let active_mutator_context = unsafe { (t + ACTIVE_MUTATOR_CONTEXT_FIELD_OFFSET)
.load::<bool>() };
if active_mutator_context {
unsafe {
let mutator = (t + MMTK_HANDLE_FIELD_OFFSET).load::<usize>();
let ret =
&mut *(mutator as *mut <SelectedPlan<JikesRVM> as Plan>::Mutator);
return Some(ret);
}
}
} |
index.tsx | import useTextInput, { useInputValueState } from '../src/hooks/useTextInput'
import TextInput from '../src/TextInput'
export const TextInputExample0: React.FC = () => {
const textInput = useTextInput()
return <TextInput {...textInput}>index</TextInput>
}
export const TextInputExample1: React.FC = () => {
const input = useInput()
return <input {...input} />
}
export const TextInputExample2: React.FC = () => {
const inputValueState = useInputValueState()
return <input {...inputValueState} />
}
export interface InputHTMLAttributes<T> extends HTMLAttributes<T> {
accept?: string
alt?: string
autoComplete?: string
autoFocus?: boolean
capture?: boolean | string // https://www.w3.org/TR/html-media-capture/#the-capture-attribute
checked?: boolean
crossOrigin?: string
disabled?: boolean
enterKeyHint?:
| 'enter'
| 'done'
| 'go'
| 'next'
| 'previous'
| 'search'
| 'send'
form?: string
formAction?: string
formEncType?: string
formMethod?: string
formNoValidate?: boolean
formTarget?: string
height?: number | string
list?: string
max?: number | string
maxLength?: number
min?: number | string
minLength?: number
multiple?: boolean
name?: string
pattern?: string
placeholder?: string
readOnly?: boolean
required?: boolean
size?: number
src?: string
step?: number | string
type?: string
value?: string | ReadonlyArray<string> | number
width?: number | string
onChange?: ChangeEventHandler<T>
} | import React, { ChangeEventHandler, HTMLAttributes } from 'react'
import useInput from '../src/hooks/useInput' |
|
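// The imported hooks live outside this file; a minimal sketch of what a
// `useInput`-style hook could look like (illustrative only — the real
// `../src/hooks/useInput` may differ):
//
// const useInputSketch = () => {
//   const [value, setValue] = React.useState('')
//   const onChange: ChangeEventHandler<HTMLInputElement> = (e) =>
//     setValue(e.target.value)
//   return { value, onChange } // spreads onto <input {...input} />
// }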
receiver_test.go | // Copyright 2020, OpenTelemetry Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package prometheusexecreceiver
import (
"context"
"fmt"
"path/filepath"
"testing"
"time"
"github.com/prometheus/common/model"
promconfig "github.com/prometheus/prometheus/config"
"github.com/prometheus/prometheus/discovery"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"go.opentelemetry.io/collector/component/componenttest"
"go.opentelemetry.io/collector/config"
"go.opentelemetry.io/collector/consumer/consumertest"
"go.opentelemetry.io/collector/model/pdata"
"go.opentelemetry.io/collector/service/servicetest"
"github.com/open-telemetry/opentelemetry-collector-contrib/receiver/prometheusexecreceiver/subprocessmanager"
"github.com/open-telemetry/opentelemetry-collector-contrib/receiver/prometheusreceiver"
)
// loadConfigAssertNoError loads the test config, asserts there are no errors, and returns the wanted receiver config
func loadConfigAssertNoError(t *testing.T, receiverConfigID config.ComponentID) config.Receiver {
factories, err := componenttest.NopFactories()
assert.NoError(t, err)
factory := NewFactory()
factories.Receivers[factory.Type()] = factory
cfg, err := servicetest.LoadConfigAndValidate(filepath.Join("testdata", "config.yaml"), factories)
assert.NoError(t, err)
assert.NotNil(t, cfg)
return cfg.Receivers[receiverConfigID]
}
// TestExecKeyMissing loads config and asserts there is an error with that config
func TestExecKeyMissing(t *testing.T) {
receiverConfig := loadConfigAssertNoError(t, config.NewComponentID(typeStr))
assertErrorWhenExecKeyMissing(t, receiverConfig)
}
// assertErrorWhenExecKeyMissing makes sure the config passed throws an error, since it's missing the exec key
func assertErrorWhenExecKeyMissing(t *testing.T, errorReceiverConfig config.Receiver) {
_, err := newPromExecReceiver(componenttest.NewNopReceiverCreateSettings(), errorReceiverConfig.(*Config), nil)
assert.Error(t, err, "newPromExecReceiver() didn't return an error")
}
// TestEndToEnd loads the test config and completes an e2e test where Prometheus metrics are scraped twice from `test_prometheus_exporter.go`
func TestEndToEnd(t *testing.T) {
receiverConfig := loadConfigAssertNoError(t, config.NewComponentIDWithName(typeStr, "end_to_end_test/2"))
// e2e test with port undefined by user
endToEndScrapeTest(t, receiverConfig, "end-to-end port not defined")
}
// endToEndScrapeTest creates a receiver that invokes `go run test_prometheus_exporter.go` and waits until it has scraped the /metrics endpoint twice - the application will crash between each scrape
func endToEndScrapeTest(t *testing.T, receiverConfig config.Receiver, testName string) {
sink := new(consumertest.MetricsSink)
wrapper, err := newPromExecReceiver(componenttest.NewNopReceiverCreateSettings(), receiverConfig.(*Config), sink)
assert.NoError(t, err, "newPromExecReceiver() returned an error")
ctx := context.Background()
err = wrapper.Start(ctx, componenttest.NewNopHost())
assert.NoError(t, err, "Start() returned an error")
defer func() { assert.NoError(t, wrapper.Shutdown(ctx)) }()
var metrics []pdata.Metrics
// Make sure two scrapes have been completed (this implies the process was started, scraped, restarted and finally scraped a second time)
const waitFor = 30 * time.Second
const tick = 100 * time.Millisecond
require.Eventuallyf(t, func() bool {
got := sink.AllMetrics()
if len(got) < 2 {
return false
}
metrics = got
return true
}, waitFor, tick, "Two scrapes not completed after %v (%v)", waitFor, testName)
assertTwoUniqueValuesScraped(t, metrics)
}
// assertTwoUniqueValuesScraped iterates over the found metrics and succeeds if it finds at least 2 unique values, meaning the endpoint
// was successfully scraped twice AND the subprocess being handled was stopped and restarted
func assertTwoUniqueValuesScraped(t *testing.T, metricsSlice []pdata.Metrics) {
var value float64
for i := range metricsSlice {
ms := metricsSlice[i].ResourceMetrics().At(0).ScopeMetrics().At(0).Metrics()
var tempM pdata.Metric
ok := false
for j := 0; j < ms.Len(); j++ {
if ms.At(j).Name() == "timestamp_now" {
tempM = ms.At(j)
ok = true
break
}
}
require.True(t, ok, "timestamp_now metric not found")
assert.Equal(t, pdata.MetricDataTypeGauge, tempM.DataType())
tempV := tempM.Gauge().DataPoints().At(0).DoubleVal()
if i != 0 && tempV != value {
return
}
if tempV != value {
value = tempV
}
}
assert.Fail(t, fmt.Sprintf("All %v scraped values were non-unique", len(metricsSlice)))
}
func TestConfigBuilderFunctions(t *testing.T) {
configTests := []struct {
name string
customName string
cfg *Config
wantReceiverConfig *prometheusreceiver.Config
wantSubprocessConfig *subprocessmanager.SubprocessConfig
wantErr bool
}{
{
name: "no command",
cfg: &Config{
ReceiverSettings: config.NewReceiverSettings(config.NewComponentID(typeStr)),
ScrapeInterval: 60 * time.Second,
ScrapeTimeout: 10 * time.Second,
Port: 9104,
SubprocessConfig: subprocessmanager.SubprocessConfig{
Command: "",
Env: []subprocessmanager.EnvConfig{},
},
},
wantReceiverConfig: &prometheusreceiver.Config{
ReceiverSettings: config.NewReceiverSettings(config.NewComponentID(typeStr)),
PrometheusConfig: &promconfig.Config{
ScrapeConfigs: []*promconfig.ScrapeConfig{
{
ScrapeInterval: model.Duration(60 * time.Second),
ScrapeTimeout: model.Duration(10 * time.Second),
Scheme: "http",
MetricsPath: "/metrics",
JobName: "prometheus_exec",
HonorLabels: false,
HonorTimestamps: true,
ServiceDiscoveryConfigs: discovery.Configs{
&discovery.StaticConfig{
{
Targets: []model.LabelSet{
{model.AddressLabel: model.LabelValue("localhost:9104")},
},
},
},
},
},
},
},
},
wantSubprocessConfig: &subprocessmanager.SubprocessConfig{
Env: []subprocessmanager.EnvConfig{},
},
wantErr: true,
},
{
name: "normal config",
cfg: &Config{
ReceiverSettings: config.NewReceiverSettings(config.NewComponentIDWithName(typeStr, "mysqld")),
ScrapeInterval: 90 * time.Second,
ScrapeTimeout: 10 * time.Second,
Port: 9104,
SubprocessConfig: subprocessmanager.SubprocessConfig{
Command: "mysqld_exporter",
Env: []subprocessmanager.EnvConfig{
{
Name: "DATA_SOURCE_NAME",
Value: "password:username@(url:port)/dbname",
},
},
},
},
wantReceiverConfig: &prometheusreceiver.Config{
ReceiverSettings: config.NewReceiverSettings(config.NewComponentIDWithName(typeStr, "mysqld")),
PrometheusConfig: &promconfig.Config{
ScrapeConfigs: []*promconfig.ScrapeConfig{
{
ScrapeInterval: model.Duration(90 * time.Second),
ScrapeTimeout: model.Duration(10 * time.Second),
Scheme: "http",
MetricsPath: "/metrics",
JobName: "mysqld",
HonorLabels: false,
HonorTimestamps: true,
ServiceDiscoveryConfigs: discovery.Configs{
&discovery.StaticConfig{
{
Targets: []model.LabelSet{
{model.AddressLabel: model.LabelValue("localhost:9104")},
},
},
},
},
},
},
},
},
wantSubprocessConfig: &subprocessmanager.SubprocessConfig{
Command: "mysqld_exporter",
Env: []subprocessmanager.EnvConfig{
{
Name: "DATA_SOURCE_NAME",
Value: "password:username@(url:port)/dbname",
},
},
},
wantErr: false,
},
{
name: "lots of defaults",
cfg: &Config{
ReceiverSettings: config.NewReceiverSettings(config.NewComponentIDWithName(typeStr, "postgres/test")),
ScrapeInterval: 60 * time.Second,
ScrapeTimeout: 10 * time.Second,
SubprocessConfig: subprocessmanager.SubprocessConfig{
Command: "postgres_exporter",
Env: []subprocessmanager.EnvConfig{
{
Name: "DATA_SOURCE_NAME",
Value: "password:username@(url:port)/dbname",
},
},
},
},
wantReceiverConfig: &prometheusreceiver.Config{
ReceiverSettings: config.NewReceiverSettings(config.NewComponentIDWithName(typeStr, "postgres/test")),
PrometheusConfig: &promconfig.Config{
ScrapeConfigs: []*promconfig.ScrapeConfig{
{
ScrapeInterval: model.Duration(60 * time.Second),
ScrapeTimeout: model.Duration(10 * time.Second),
Scheme: "http",
MetricsPath: "/metrics",
JobName: "postgres/test",
HonorLabels: false,
HonorTimestamps: true,
ServiceDiscoveryConfigs: discovery.Configs{
&discovery.StaticConfig{
{
Targets: []model.LabelSet{
{model.AddressLabel: model.LabelValue("localhost:0")},
},
},
},
},
},
},
},
},
wantSubprocessConfig: &subprocessmanager.SubprocessConfig{
Command: "postgres_exporter",
Env: []subprocessmanager.EnvConfig{
{
Name: "DATA_SOURCE_NAME",
Value: "password:username@(url:port)/dbname",
},
},
},
wantErr: false,
},
}
for _, test := range configTests {
t.Run(test.name, func(t *testing.T) {
got := getPromReceiverConfig(test.cfg)
assert.Equal(t, test.wantReceiverConfig, got)
})
}
for _, test := range configTests {
t.Run(test.name, func(t *testing.T) {
got := getSubprocessConfig(test.cfg)
assert.Equal(t, test.wantSubprocessConfig, got)
})
}
}
func TestFillPortPlaceholders(t *testing.T) {
fillPortPlaceholdersTests := []struct {
name string
wrapper *prometheusExecReceiver
newPort int
want *subprocessmanager.SubprocessConfig
}{
{
name: "port is defined by user",
wrapper: &prometheusExecReceiver{
port: 10500,
config: &Config{
ScrapeTimeout: 10 * time.Second,
SubprocessConfig: subprocessmanager.SubprocessConfig{
Command: "apache_exporter --port:{{port}}",
Env: []subprocessmanager.EnvConfig{
{
Name: "DATA_SOURCE_NAME",
Value: "user:password@(hostname:{{port}})/dbname",
},
{
Name: "SECONDARY_PORT",
Value: "{{port}}",
},
},
},
},
subprocessConfig: &subprocessmanager.SubprocessConfig{
Env: []subprocessmanager.EnvConfig{
{
Name: "DATA_SOURCE_NAME",
},
{
Name: "SECONDARY_PORT",
},
},
},
},
newPort: 10500,
want: &subprocessmanager.SubprocessConfig{
Command: "apache_exporter --port:10500",
Env: []subprocessmanager.EnvConfig{
{
Name: "DATA_SOURCE_NAME",
Value: "user:password@(hostname:10500)/dbname",
},
{
Name: "SECONDARY_PORT",
Value: "10500",
},
},
},
},
{
name: "no string templating",
wrapper: &prometheusExecReceiver{
config: &Config{
ScrapeTimeout: 10 * time.Second,
SubprocessConfig: subprocessmanager.SubprocessConfig{
Command: "apache_exporter",
Env: []subprocessmanager.EnvConfig{
{
Name: "DATA_SOURCE_NAME",
Value: "user:password@(hostname:port)/dbname",
},
{
Name: "SECONDARY_PORT",
Value: "1234",
},
},
},
},
subprocessConfig: &subprocessmanager.SubprocessConfig{
Env: []subprocessmanager.EnvConfig{
{
Name: "DATA_SOURCE_NAME",
},
{
Name: "SECONDARY_PORT",
},
},
},
},
newPort: 0,
want: &subprocessmanager.SubprocessConfig{
Command: "apache_exporter",
Env: []subprocessmanager.EnvConfig{
{
Name: "DATA_SOURCE_NAME",
Value: "user:password@(hostname:port)/dbname",
},
{
Name: "SECONDARY_PORT",
Value: "1234",
},
},
},
},
{
name: "no port defined",
wrapper: &prometheusExecReceiver{
config: &Config{
SubprocessConfig: subprocessmanager.SubprocessConfig{
Command: "apache_exporter --port={{port}}",
Env: []subprocessmanager.EnvConfig{
{
Name: "DATA_SOURCE_NAME",
Value: "user:password@(hostname:{{port}})/dbname",
},
{
Name: "SECONDARY_PORT",
Value: "{{port}}",
},
},
},
},
subprocessConfig: &subprocessmanager.SubprocessConfig{
Env: []subprocessmanager.EnvConfig{
{
Name: "DATA_SOURCE_NAME",
},
{
Name: "SECONDARY_PORT",
},
},
},
},
newPort: 10111,
want: &subprocessmanager.SubprocessConfig{
Command: "apache_exporter --port=10111",
Env: []subprocessmanager.EnvConfig{
{
Name: "DATA_SOURCE_NAME",
Value: "user:password@(hostname:10111)/dbname",
},
{
Name: "SECONDARY_PORT",
Value: "10111",
},
},
},
},
}
for _, test := range fillPortPlaceholdersTests {
t.Run(test.name, func(t *testing.T) {
got := test.wrapper.fillPortPlaceholders(test.newPort)
assert.Equal(t, test.want.Command, got.Command)
assert.Equal(t, test.want.Env, got.Env)
})
}
}
func TestGetDelayAndComputeCrashCount(t *testing.T) {
var (
getDelayAndComputeCrashCountTests = []struct {
name string
elapsed time.Duration
healthyProcessTime time.Duration
crashCount int
healthyCrashCount int
wantDelay time.Duration
wantCrashCount int
}{
{
name: "healthy process 1",
elapsed: 15 * time.Minute,
healthyProcessTime: 30 * time.Minute,
crashCount: 2,
healthyCrashCount: 3,
wantDelay: 1 * time.Second,
wantCrashCount: 3,
},
{
name: "healthy process 2",
elapsed: 15 * time.Hour,
healthyProcessTime: 20 * time.Minute,
crashCount: 6,
healthyCrashCount: 2,
wantDelay: 1 * time.Second,
wantCrashCount: 1,
},
{
name: "unhealthy process 1",
elapsed: 15 * time.Second,
healthyProcessTime: 45 * time.Minute,
crashCount: 4,
healthyCrashCount: 3,
wantCrashCount: 5,
},
{
name: "unhealthy process 2",
elapsed: 15 * time.Second,
healthyProcessTime: 75 * time.Second,
crashCount: 5, | {
name: "unhealthy process 3",
elapsed: 15 * time.Second,
healthyProcessTime: 30 * time.Minute,
crashCount: 6,
healthyCrashCount: 3,
wantCrashCount: 7,
},
{
name: "unhealthy process 4",
elapsed: 15 * time.Second,
healthyProcessTime: 10 * time.Minute,
crashCount: 7,
healthyCrashCount: 3,
wantCrashCount: 8,
},
}
previousResult time.Duration
)
// getDelay() test
t.Run("GetDelay test", func(t *testing.T) {
for _, test := range getDelayAndComputeCrashCountTests {
got := getDelay(test.elapsed, test.healthyProcessTime, test.crashCount, test.healthyCrashCount)
if test.wantDelay > 0 {
assert.Equalf(t, test.wantDelay, got, "getDelay() '%v', got = %v, want %v", test.name, got, test.wantDelay)
continue
}
if previousResult > got {
t.Errorf("getDelay() '%v', got = %v, want something larger than the previous result %v", test.name, got, previousResult)
}
previousResult = got
}
})
// computeCrashCount() test
per := &prometheusExecReceiver{}
for _, test := range getDelayAndComputeCrashCountTests {
t.Run(test.name, func(t *testing.T) {
got := per.computeCrashCount(test.elapsed, test.crashCount)
assert.Equal(t, test.wantCrashCount, got)
})
}
} | healthyCrashCount: 3,
wantCrashCount: 6,
}, |
_parse.py | #!/usr/bin/env python
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ........................................ NOTICE
#
# This file has been derived and modified from a source licensed under Apache Version 2.0.
# See files NOTICE and README.md for more details.
#
# ........................................ ******
"""
logtools._parse
Log format parsing programmatic and command-line utilities.
uses the logtools.parsers module
"""
import sys
import logging
from operator import and_
from optparse import OptionParser
from functools import reduce
import json
import logtools.parsers
import logtools.parsers2
from ._config import interpolate_config, AttrDict, setLoglevel
from ._config import checkDpath
from .parsers2 import FileFormat , TraditionalFileFormat, ForwardFormat
from .parsers2 import TraditionalForwardFormat
from .utils import getObj
checkDpath()
__all__ = ['logparse_parse_args', 'logparse', 'logparse_main']
def logparse_parse_args():
parser = OptionParser()
parser.add_option("-p", "--parser", dest="parser", default=None,
help="Log format parser (e.g 'CommonLogFormat'). See documentation for available parsers.") # noqa
parser.add_option("-F", "--format", dest="format", default=None,
help="Format string. Used by the parser (e.g AccessLog format specifier)") # noqa
parser.add_option("-f", "--field", dest="field", default=None,
help="Parsed Field index to output")
parser.add_option("-i", "--ignore", dest="ignore", default=None, action="store_true", # noqa
help="Ignore missing fields errors (skip lines with missing fields)") # noqa
parser.add_option("-H", "--header", dest="header", default=None, action="store_true", # noqa
help="Prepend a header describing the selected fields to output.") # noqa
parser.add_option("-P", "--profile", dest="profile", default='logparse',
help="Configuration profile (section in configuration file)") # noqa
parser.add_option("-R", "--raw", dest="raw", default=None, action="store_true",
help="When set output is not encoded for UTF-8")
## default kept for compatibility
# logging level for debug and other information
parser.add_option("-s","--sym" , type = str,
dest="logLevSym",
help="logging level (symbol)")
parser.add_option("-n","--num" , type=int ,
dest="logLevVal",
help="logging level (value)")
options, args = parser.parse_args()
# Interpolate from configuration
options.parser = interpolate_config(options.parser, options.profile, 'parser')
options.format = interpolate_config(options.format, options.profile, 'format',
default=False)
options.field = interpolate_config(options.field, options.profile, 'field')
options.ignore = interpolate_config(options.ignore, options.profile, 'ignore',
default=False, type=bool)
options.header = interpolate_config(options.header, options.profile, 'header',
default=False, type=bool)
options.raw = interpolate_config(options.raw, options.profile, 'raw')
# Set the logging level
setLoglevel(options)
return AttrDict(options.__dict__), args
def logparse(options, args, fh):
"""Parse given input stream using given
parser class and emit specified field(s)"""
field = options.field
logtools.parsers2.addConfigFileSection()
parser = getObj(options.parser, (logtools.parsers, logtools.parsers2))()
if options.get('format', None):
parser.set_format(options.format)
key_func = None
keys = None
if isinstance(options.field, int) or \
(isinstance(options.field, str) and options.field.isdigit()):
# Field given as integer (index)
field = int(options.field) - 1
key_func = lambda x: parser(x.strip()).by_index(field, raw=True)
keys = [options.field]
else:
if isinstance(parser, logtools.parsers2.JSONParserPlus):
key_func = logtools.parsers2.dpath_getter_gen(parser, options.field, options)
else:
# Field given as string
# Check how many fields are requested
keys = options.field.split(",")
L = len(keys)
if L == 1:
key_func = lambda x: parser(x.strip())[field]
else:
# Multiple fields requested
is_indices = reduce(and_, (k.isdigit() for k in keys), True)
key_func = logtools.parsers.multikey_getter_gen(parser, keys,
is_indices=is_indices)
if options.header is True:
yield '\t'.join(keys)
for line in fh:
try:
yield key_func(line)
except KeyError as exc:
# Could not find user-specified field
logging.warning("Could not match user-specified fields: %s", exc)
except ValueError as exc:
# Could not parse the log line
if options.ignore:
logging.debug("Could not match fields for parsed line: %s", line)
continue
else:
logging.error("Could not match fields for parsed line: %s", line)
raise
def | ():
"""Console entry-point"""
options, args = logparse_parse_args()
for row in logparse(options, args, fh=sys.stdin):
if row:
if isinstance(row, dict):
json.dump(row, sys.stdout)
elif options.raw:
print(row)
else:
print( row.encode('ascii', 'ignore') )
return 0
| logparse_main |
recorded.rs | use crate::core::counter::Counter;
use crate::iterators::{HistogramIterator, PickMetadata, PickyIterator};
use crate::Histogram;
/// An iterator that will yield only bins with at least one sample.
pub struct Iter {
visited: Option<usize>, | pub fn new<T: Counter>(hist: &Histogram<T>) -> HistogramIterator<T, Iter> {
HistogramIterator::new(hist, Iter { visited: None })
}
}
impl<T: Counter> PickyIterator<T> for Iter {
fn pick(&mut self, index: usize, _: u64, count_at_index: T) -> Option<PickMetadata> {
if count_at_index != T::zero() && self.visited.map(|i| i != index).unwrap_or(true) {
self.visited = Some(index);
return Some(PickMetadata::new(None, None));
}
None
}
fn more(&mut self, _: usize) -> bool {
false
}
} | }
impl Iter {
/// Construct a new recorded iterator. See `Histogram::iter_recorded` for details.
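// A minimal usage sketch (assumes the hdrhistogram-style public API that
// exposes this iterator as `Histogram::iter_recorded`):
//
//     let mut hist = Histogram::<u64>::new(3).unwrap();
//     hist.record(100).unwrap();
//     hist.record(100).unwrap();
//     for v in hist.iter_recorded() {
//         // only bins with at least one sample are yielded
//         println!("{} -> {}", v.value_iterated_to(), v.count_at_value());
//     }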
helper.go | package helper
import (
"errors"
"reflect"
"regexp"
"runtime"
"strings"
"github.com/micro/go-micro/server"
)
const (
APIMetadataKey = "post_api"
APIVerMetadataKey = "post_api_ver"
)
const (
matchFuncNameExpr = `\(\*{0,1}[a-zA-Z0-9_]+\)\.\w+`
replaceFuncNameExpr = `[\(\)\*]`
)
var (
nilHandlerOption = func(o *server.HandlerOptions) {}
)
func ToHandlerOption(fn interface{}, ver, api string, alias ...string) server.HandlerOption {
if fn == nil {
return nilHandlerOption
}
api = strings.TrimSpace(api)
if api == "" {
return nilHandlerOption
}
apis := []string{api}
if len(alias) > 0 |
strAPIs := strings.Join(apis, ",")
if name, err := FuncName(fn); err != nil {
return nilHandlerOption
} else {
return func(o *server.HandlerOptions) {
o.Metadata[name] = map[string]string{APIMetadataKey: strAPIs, APIVerMetadataKey: ver}
}
}
}
func FuncName(v interface{}) (name string, err error) {
val := reflect.ValueOf(v)
if val.Kind() != reflect.Func {
err = errors.New("value is not a func")
return
}
fullName := runtime.FuncForPC(val.Pointer()).Name()
var r *regexp.Regexp
if r, err = regexp.Compile(matchFuncNameExpr); err != nil {
return
}
tmpName := r.FindString(fullName)
if r, err = regexp.Compile(replaceFuncNameExpr); err != nil {
return
}
name = r.ReplaceAllString(tmpName, "")
return
}
| {
apis = append(apis, alias...)
} |
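// A minimal sketch of how FuncName resolves a method value (the receiver type
// below is illustrative):
//
//	type Greeter struct{}
//
//	func (g *Greeter) Hello() {}
//
//	// runtime name is like "pkg.(*Greeter).Hello-fm"; the regexes above
//	// reduce it to "Greeter.Hello"
//	name, err := FuncName((&Greeter{}).Hello)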