file_name | prefix | suffix | middle
---|---|---|---|
flowtransformer.go | package flowtransformer
import (
"fmt"
"github.com/skydive-project/skydive/contrib/objectstore/subscriber/flowtransformer/custom1"
"github.com/skydive-project/skydive/flow"
)
// FlowTransformer allows generic transformations of a flow
type FlowTransformer interface {
// Transform transforms a flow before it is stored
Transform(flow *flow.Flow) interface{}
}
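// Usage sketch (hypothetical caller, not part of this package): resolve a
// transformer by name once at startup, then apply it to each flow before
// storing the result:
//
// transformer, err := flowtransformer.New("custom1")
// if err != nil {
// return err
// }
// if transformer != nil {
// record = transformer.Transform(f) // f is a *flow.Flow
// }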
// New creates a new flow transformer based on a name string
func New(flowTransformerName string) (FlowTransformer, error) | {
switch flowTransformerName {
case "custom1":
return custom1.New(), nil
case "":
return nil, nil
default:
return nil, fmt.Errorf("Marshaller '%s' is not supported", flowTransformerName)
}
} |
|
wrapping.rs | //! Wrapping arithmetic.
use crate::Zero;
use core::fmt;
use subtle::{Choice, ConditionallySelectable, ConstantTimeEq};
/// Provides intentionally-wrapped arithmetic on `T`.
///
/// This is analogous to [`core::num::Wrapping`] but allows this crate to
/// define trait impls for this type.
#[derive(Copy, Clone, Debug, Default, Eq, PartialEq, PartialOrd, Ord)]
pub struct Wrapping<T>(pub T);
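// A minimal usage sketch (assuming an inner type implementing the traits
// below): `Wrapping` is a transparent newtype, so formatting and constant-time
// comparison forward to the wrapped value, e.g.
// `Wrapping(3u8).ct_eq(&Wrapping(3u8))` returns a truthy `Choice`.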
impl<T: Zero> Zero for Wrapping<T> {
const ZERO: Self = Self(T::ZERO);
}
impl<T: fmt::Display> fmt::Display for Wrapping<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.0.fmt(f)
}
}
impl<T: fmt::Binary> fmt::Binary for Wrapping<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.0.fmt(f)
}
}
impl<T: fmt::Octal> fmt::Octal for Wrapping<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.0.fmt(f)
}
}
impl<T: fmt::LowerHex> fmt::LowerHex for Wrapping<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.0.fmt(f)
}
}
impl<T: fmt::UpperHex> fmt::UpperHex for Wrapping<T> {
fn | (&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.0.fmt(f)
}
}
impl<T: ConditionallySelectable> ConditionallySelectable for Wrapping<T> {
fn conditional_select(a: &Self, b: &Self, choice: Choice) -> Self {
Wrapping(T::conditional_select(&a.0, &b.0, choice))
}
}
impl<T: ConstantTimeEq> ConstantTimeEq for Wrapping<T> {
fn ct_eq(&self, other: &Self) -> Choice {
self.0.ct_eq(&other.0)
}
}
| fmt |
pluralsight.py |
from ..utils import (
update_url_query,
int_or_none
)
from ..extractor.pluralsight import PluralsightCourseIE as Old
class PluralsightCourseIE(Old):
def _real_extract(self, url):
| course_id = self._match_id(url)
# TODO: PSM cookie
course = self._download_course(course_id, url, course_id)
title = course['title']
course_name = course['name']
course_data = course['modules']
description = course.get('description') or course.get('shortDescription')
entries = []
for num, module in enumerate(course_data, 1):
author = module.get('author')
module_name = module.get('name')
if not author or not module_name:
continue
for clip in module.get('clips', []):
clip_index = int_or_none(clip.get('index'))
if clip_index is None:
continue
clip_url = update_url_query(
'%s/player' % self._API_BASE, query={
'mode': 'live',
'course': course_name,
'author': author,
'name': module_name,
'clip': clip_index,
})
entries.append({
'_type': 'url_transparent',
'url': clip_url,
'duration': int_or_none(clip.get('duration')),
'title': clip.get('title') or module.get('title'),
})
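# Note: '_type': 'url_transparent' defers to the per-clip extractor at
# download time, letting it fill in or override metadata on these entries.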
return self.playlist_result(entries, course_id, title, description) |
|
proxyconfig.go | // Copyright Istio Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package cmd
import (
"context"
"fmt"
"io"
"io/ioutil"
"os"
"regexp"
"strings"
"github.com/ghodss/yaml"
"github.com/spf13/cobra"
"istio.io/istio/istioctl/pkg/util/handlers"
"istio.io/istio/istioctl/pkg/writer/envoy/clusters"
"istio.io/istio/istioctl/pkg/writer/envoy/configdump"
"istio.io/istio/pilot/pkg/model"
"istio.io/istio/pkg/config/host"
"istio.io/pkg/log"
)
const (
jsonOutput = "json"
summaryOutput = "short"
)
var (
fqdn, direction, subset string
port int
verboseProxyConfig bool
address, listenerType string
routeName string
clusterName, status string
// output format (json or short)
outputFormat string
)
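// These filter variables are package-level state shared by the proxy-config
// subcommands defined below; each subcommand registers only the flags it uses.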
// Level is an enumeration of all supported log levels.
type Level int
const (
defaultLoggerName = "level"
defaultOutputLevel = WarningLevel
)
const (
// OffLevel disables logging
OffLevel Level = iota
// CriticalLevel enables critical level logging
CriticalLevel
// ErrorLevel enables error level logging
ErrorLevel
// WarningLevel enables warning level logging
WarningLevel
// InfoLevel enables info level logging
InfoLevel
// DebugLevel enables debug level logging
DebugLevel
// TraceLevel enables trace level logging
TraceLevel
)
// existing sorted active loggers
var activeLoggers = []string{
"admin",
"aws",
"assert",
"backtrace",
"client",
"config",
"connection",
"conn_handler", // Added through https://github.com/envoyproxy/envoy/pull/8263
"dubbo",
"file",
"filter",
"forward_proxy",
"grpc",
"hc",
"health_checker",
"http",
"http2",
"hystrix",
"init",
"io",
"jwt",
"kafka",
"lua",
"main",
"misc",
"mongo",
"quic",
"pool",
"rbac",
"redis",
"router",
"runtime",
"stats",
"secret",
"tap",
"testing",
"thrift",
"tracing",
"upstream",
"udp",
"wasm",
}
var levelToString = map[Level]string{
TraceLevel: "trace",
DebugLevel: "debug",
InfoLevel: "info",
WarningLevel: "warning",
ErrorLevel: "error",
CriticalLevel: "critical",
OffLevel: "off",
}
var stringToLevel = map[string]Level{
"trace": TraceLevel,
"debug": DebugLevel,
"info": InfoLevel,
"warning": WarningLevel,
"error": ErrorLevel,
"critical": CriticalLevel,
"off": OffLevel,
}
var (
loggerLevelString = ""
reset = false
)
func setupPodConfigdumpWriter(podName, podNamespace string, out io.Writer) (*configdump.ConfigWriter, error) {
kubeClient, err := kubeClient(kubeconfig, configContext)
if err != nil {
return nil, fmt.Errorf("failed to create k8s client: %v", err)
}
path := "config_dump"
debug, err := kubeClient.EnvoyDo(context.TODO(), podName, podNamespace, "GET", path, nil)
if err != nil {
return nil, fmt.Errorf("failed to execute command on %s.%s sidecar: %v", podName, podNamespace, err)
}
return setupConfigdumpEnvoyConfigWriter(debug, out)
}
func setupFileConfigdumpWriter(filename string, out io.Writer) (*configdump.ConfigWriter, error) {
file := os.Stdin
if filename != "-" {
var err error
file, err = os.Open(filename)
if err != nil {
return nil, err
}
}
defer func() {
if err := file.Close(); err != nil {
log.Errorf("failed to close %s: %s", filename, err)
}
}()
data, err := ioutil.ReadAll(file)
if err != nil {
return nil, err
}
return setupConfigdumpEnvoyConfigWriter(data, out)
}
func setupConfigdumpEnvoyConfigWriter(debug []byte, out io.Writer) (*configdump.ConfigWriter, error) {
cw := &configdump.ConfigWriter{Stdout: out}
err := cw.Prime(debug)
if err != nil {
return nil, err
}
return cw, nil
}
func setupEnvoyLogConfig(param, podName, podNamespace string) (string, error) {
kubeClient, err := kubeClient(kubeconfig, configContext)
if err != nil {
return "", fmt.Errorf("failed to create Kubernetes client: %v", err)
}
path := "logging"
if param != "" {
path = path + "?" + param
}
result, err := kubeClient.EnvoyDo(context.TODO(), podName, podNamespace, "POST", path, nil)
if err != nil {
return "", fmt.Errorf("failed to execute command on Envoy: %v", err)
}
return string(result), nil
}
func getLogLevelFromConfigMap() (string, error) {
valuesConfig, err := getValuesFromConfigMap(kubeconfig)
if err != nil {
return "", err
}
var values struct {
SidecarInjectorWebhook struct {
Global struct {
Proxy struct {
LogLevel string `json:"logLevel"`
} `json:"proxy"`
} `json:"global"`
} `json:"sidecarInjectorWebhook"`
}
if err := yaml.Unmarshal([]byte(valuesConfig), &values); err != nil {
return "", fmt.Errorf("failed to parse values config: %v [%v]", err, valuesConfig)
}
return values.SidecarInjectorWebhook.Global.Proxy.LogLevel, nil
}
func setupPodClustersWriter(podName, podNamespace string, out io.Writer) (*clusters.ConfigWriter, error) {
kubeClient, err := kubeClient(kubeconfig, configContext)
if err != nil {
return nil, fmt.Errorf("failed to create k8s client: %v", err)
}
path := "clusters?format=json"
debug, err := kubeClient.EnvoyDo(context.TODO(), podName, podNamespace, "GET", path, nil)
if err != nil {
return nil, fmt.Errorf("failed to execute command on Envoy: %v", err)
}
return setupClustersEnvoyConfigWriter(debug, out)
}
func setupFileClustersWriter(filename string, out io.Writer) (*clusters.ConfigWriter, error) |
// TODO(fisherxu): migrate this to config dump when implemented in Envoy
// Issue to track -> https://github.com/envoyproxy/envoy/issues/3362
func setupClustersEnvoyConfigWriter(debug []byte, out io.Writer) (*clusters.ConfigWriter, error) {
cw := &clusters.ConfigWriter{Stdout: out}
err := cw.Prime(debug)
if err != nil {
return nil, err
}
return cw, nil
}
func clusterConfigCmd() *cobra.Command {
clusterConfigCmd := &cobra.Command{
Use: "cluster [<pod-name[.namespace]>]",
Short: "Retrieves cluster configuration for the Envoy in the specified pod",
Long: `Retrieve information about cluster configuration for the Envoy instance in the specified pod.`,
Example: ` # Retrieve summary about cluster configuration for a given pod from Envoy.
istioctl proxy-config clusters <pod-name[.namespace]>
# Retrieve cluster summary for clusters with port 9080.
istioctl proxy-config clusters <pod-name[.namespace]> --port 9080
# Retrieve full cluster dump for clusters that are inbound with a FQDN of details.default.svc.cluster.local.
istioctl proxy-config clusters <pod-name[.namespace]> --fqdn details.default.svc.cluster.local --direction inbound -o json
# Retrieve cluster summary without using Kubernetes API
ssh <user@hostname> 'curl localhost:15000/config_dump' > envoy-config.json
istioctl proxy-config clusters --file envoy-config.json
`,
Aliases: []string{"clusters", "c"},
Args: func(cmd *cobra.Command, args []string) error {
if (len(args) == 1) != (configDumpFile == "") {
cmd.Println(cmd.UsageString())
return fmt.Errorf("cluster requires pod name or --file parameter")
}
return nil
},
RunE: func(c *cobra.Command, args []string) error {
var configWriter *configdump.ConfigWriter
var err error
if len(args) == 1 {
podName, ns := handlers.InferPodInfo(args[0], handlers.HandleNamespace(namespace, defaultNamespace))
configWriter, err = setupPodConfigdumpWriter(podName, ns, c.OutOrStdout())
} else {
configWriter, err = setupFileConfigdumpWriter(configDumpFile, c.OutOrStdout())
}
if err != nil {
return err
}
filter := configdump.ClusterFilter{
FQDN: host.Name(fqdn),
Port: port,
Subset: subset,
Direction: model.TrafficDirection(direction),
}
switch outputFormat {
case summaryOutput:
return configWriter.PrintClusterSummary(filter)
case jsonOutput:
return configWriter.PrintClusterDump(filter)
default:
return fmt.Errorf("output format %q not supported", outputFormat)
}
},
}
clusterConfigCmd.PersistentFlags().StringVarP(&outputFormat, "output", "o", summaryOutput, "Output format: one of json|short")
clusterConfigCmd.PersistentFlags().StringVar(&fqdn, "fqdn", "", "Filter clusters by substring of Service FQDN field")
clusterConfigCmd.PersistentFlags().StringVar(&direction, "direction", "", "Filter clusters by Direction field")
clusterConfigCmd.PersistentFlags().StringVar(&subset, "subset", "", "Filter clusters by substring of Subset field")
clusterConfigCmd.PersistentFlags().IntVar(&port, "port", 0, "Filter clusters by Port field")
clusterConfigCmd.PersistentFlags().StringVarP(&configDumpFile, "file", "f", "",
"Envoy config dump JSON file")
return clusterConfigCmd
}
func listenerConfigCmd() *cobra.Command {
listenerConfigCmd := &cobra.Command{
Use: "listener [<pod-name[.namespace]>]",
Short: "Retrieves listener configuration for the Envoy in the specified pod",
Long: `Retrieve information about listener configuration for the Envoy instance in the specified pod.`,
Example: ` # Retrieve summary about listener configuration for a given pod from Envoy.
istioctl proxy-config listeners <pod-name[.namespace]>
# Retrieve listener summary for listeners with port 9080.
istioctl proxy-config listeners <pod-name[.namespace]> --port 9080
# Retrieve full listener dump for HTTP listeners with a wildcard address (0.0.0.0).
istioctl proxy-config listeners <pod-name[.namespace]> --type HTTP --address 0.0.0.0 -o json
# Retrieve listener summary without using Kubernetes API
ssh <user@hostname> 'curl localhost:15000/config_dump' > envoy-config.json
istioctl proxy-config listeners --file envoy-config.json
`,
Aliases: []string{"listeners", "l"},
Args: func(cmd *cobra.Command, args []string) error {
if (len(args) == 1) != (configDumpFile == "") {
cmd.Println(cmd.UsageString())
return fmt.Errorf("listener requires pod name or --file parameter")
}
return nil
},
RunE: func(c *cobra.Command, args []string) error {
var configWriter *configdump.ConfigWriter
var err error
if len(args) == 1 {
podName, ns := handlers.InferPodInfo(args[0], handlers.HandleNamespace(namespace, defaultNamespace))
configWriter, err = setupPodConfigdumpWriter(podName, ns, c.OutOrStdout())
} else {
configWriter, err = setupFileConfigdumpWriter(configDumpFile, c.OutOrStdout())
}
if err != nil {
return err
}
filter := configdump.ListenerFilter{
Address: address,
Port: uint32(port),
Type: listenerType,
Verbose: verboseProxyConfig,
}
switch outputFormat {
case summaryOutput:
return configWriter.PrintListenerSummary(filter)
case jsonOutput:
return configWriter.PrintListenerDump(filter)
default:
return fmt.Errorf("output format %q not supported", outputFormat)
}
},
}
listenerConfigCmd.PersistentFlags().StringVarP(&outputFormat, "output", "o", summaryOutput, "Output format: one of json|short")
listenerConfigCmd.PersistentFlags().StringVar(&address, "address", "", "Filter listeners by address field")
listenerConfigCmd.PersistentFlags().StringVar(&listenerType, "type", "", "Filter listeners by type field")
listenerConfigCmd.PersistentFlags().IntVar(&port, "port", 0, "Filter listeners by Port field")
listenerConfigCmd.PersistentFlags().BoolVar(&verboseProxyConfig, "verbose", true, "Output more information")
listenerConfigCmd.PersistentFlags().StringVarP(&configDumpFile, "file", "f", "",
"Envoy config dump JSON file")
return listenerConfigCmd
}
func logCmd() *cobra.Command {
logCmd := &cobra.Command{
Use: "log <pod-name[.namespace]>",
Short: "(experimental) Retrieves logging levels of the Envoy in the specified pod",
Long: "(experimental) Retrieve information about logging levels of the Envoy instance in the specified pod, and update optionally",
Example: ` # Retrieve information about logging levels for a given pod from Envoy.
istioctl proxy-config log <pod-name[.namespace]>
# Update levels of all loggers
istioctl proxy-config log <pod-name[.namespace]> --level off
# Update levels of the specified loggers.
istioctl proxy-config log <pod-name[.namespace]> --level http:debug,redis:debug
# Reset levels of all the loggers to default value (warning).
istioctl proxy-config log <pod-name[.namespace]> -r
`,
Aliases: []string{"o"},
Args: func(cmd *cobra.Command, args []string) error {
if len(args) < 1 {
cmd.Println(cmd.UsageString())
return fmt.Errorf("log requires pod name")
}
if reset && loggerLevelString != "" {
cmd.Println(cmd.UsageString())
return fmt.Errorf("--level cannot be combined with --reset")
}
return nil
},
RunE: func(c *cobra.Command, args []string) error {
podName, ns := handlers.InferPodInfo(args[0], handlers.HandleNamespace(namespace, defaultNamespace))
loggerNames, err := setupEnvoyLogConfig("", podName, ns)
if err != nil {
return err
}
destLoggerLevels := map[string]Level{}
if reset {
// reset logging level to `defaultOutputLevel`, and ignore the `level` option
levelString, _ := getLogLevelFromConfigMap()
level, ok := stringToLevel[levelString]
if ok {
destLoggerLevels[defaultLoggerName] = level
} else {
log.Warnf("unable to get logLevel from ConfigMap istio-sidecar-injector, using default value: %v",
levelToString[defaultOutputLevel])
destLoggerLevels[defaultLoggerName] = defaultOutputLevel
}
} else if loggerLevelString != "" {
levels := strings.Split(loggerLevelString, ",")
for _, ol := range levels {
if !strings.Contains(ol, ":") && !strings.Contains(ol, "=") {
level, ok := stringToLevel[ol]
if ok {
destLoggerLevels = map[string]Level{
defaultLoggerName: level,
}
} else {
return fmt.Errorf("unrecognized logging level: %v", ol)
}
} else {
loggerLevel := regexp.MustCompile(`[:=]`).Split(ol, 2)
if !strings.Contains(loggerNames, loggerLevel[0]) {
return fmt.Errorf("unrecognized logger name: %v", loggerLevel[0])
}
level, ok := stringToLevel[loggerLevel[1]]
if !ok {
return fmt.Errorf("unrecognized logging level: %v", loggerLevel[1])
}
destLoggerLevels[loggerLevel[0]] = level
}
}
}
var resp string
if len(destLoggerLevels) == 0 {
resp, err = setupEnvoyLogConfig("", podName, ns)
} else {
if ll, ok := destLoggerLevels[defaultLoggerName]; ok {
// update levels of all loggers first
resp, err = setupEnvoyLogConfig(defaultLoggerName+"="+levelToString[ll], podName, ns)
delete(destLoggerLevels, defaultLoggerName)
}
for lg, ll := range destLoggerLevels {
resp, err = setupEnvoyLogConfig(lg+"="+levelToString[ll], podName, ns)
}
}
if err != nil {
return err
}
_, _ = fmt.Fprint(c.OutOrStdout(), resp)
return nil
},
}
levelListString := fmt.Sprintf("[%s, %s, %s, %s, %s, %s, %s]",
levelToString[TraceLevel],
levelToString[DebugLevel],
levelToString[InfoLevel],
levelToString[WarningLevel],
levelToString[ErrorLevel],
levelToString[CriticalLevel],
levelToString[OffLevel])
s := strings.Join(activeLoggers, ", ")
logCmd.PersistentFlags().BoolVarP(&reset, "reset", "r", reset, "Reset levels to default value (warning).")
logCmd.PersistentFlags().StringVar(&loggerLevelString, "level", loggerLevelString,
fmt.Sprintf("Comma-separated minimum per-logger level of messages to output, in the form of"+
" [<logger>:]<level>,[<logger>:]<level>,... where logger can be one of %s and level can be one of %s",
s, levelListString))
return logCmd
}
func routeConfigCmd() *cobra.Command {
routeConfigCmd := &cobra.Command{
Use: "route [<pod-name[.namespace]>]",
Short: "Retrieves route configuration for the Envoy in the specified pod",
Long: `Retrieve information about route configuration for the Envoy instance in the specified pod.`,
Example: ` # Retrieve summary about route configuration for a given pod from Envoy.
istioctl proxy-config routes <pod-name[.namespace]>
# Retrieve route summary for route 9080.
istioctl proxy-config route <pod-name[.namespace]> --name 9080
# Retrieve full route dump for route 9080
istioctl proxy-config route <pod-name[.namespace]> --name 9080 -o json
# Retrieve route summary without using Kubernetes API
ssh <user@hostname> 'curl localhost:15000/config_dump' > envoy-config.json
istioctl proxy-config routes --file envoy-config.json
`,
Aliases: []string{"routes", "r"},
Args: func(cmd *cobra.Command, args []string) error {
if (len(args) == 1) != (configDumpFile == "") {
cmd.Println(cmd.UsageString())
return fmt.Errorf("route requires pod name or --file parameter")
}
return nil
},
RunE: func(c *cobra.Command, args []string) error {
var configWriter *configdump.ConfigWriter
var err error
if len(args) == 1 {
podName, ns := handlers.InferPodInfo(args[0], handlers.HandleNamespace(namespace, defaultNamespace))
configWriter, err = setupPodConfigdumpWriter(podName, ns, c.OutOrStdout())
} else {
configWriter, err = setupFileConfigdumpWriter(configDumpFile, c.OutOrStdout())
}
if err != nil {
return err
}
filter := configdump.RouteFilter{
Name: routeName,
Verbose: verboseProxyConfig,
}
switch outputFormat {
case summaryOutput:
return configWriter.PrintRouteSummary(filter)
case jsonOutput:
return configWriter.PrintRouteDump(filter)
default:
return fmt.Errorf("output format %q not supported", outputFormat)
}
},
}
routeConfigCmd.PersistentFlags().StringVarP(&outputFormat, "output", "o", summaryOutput, "Output format: one of json|short")
routeConfigCmd.PersistentFlags().StringVar(&routeName, "name", "", "Filter routes by name field")
routeConfigCmd.PersistentFlags().BoolVar(&verboseProxyConfig, "verbose", true, "Output more information")
routeConfigCmd.PersistentFlags().StringVarP(&configDumpFile, "file", "f", "",
"Envoy config dump JSON file")
return routeConfigCmd
}
func endpointConfigCmd() *cobra.Command {
endpointConfigCmd := &cobra.Command{
Use: "endpoint [<pod-name[.namespace]>]",
Short: "Retrieves endpoint configuration for the Envoy in the specified pod",
Long: `Retrieve information about endpoint configuration for the Envoy instance in the specified pod.`,
Example: ` # Retrieve full endpoint configuration for a given pod from Envoy.
istioctl proxy-config endpoint <pod-name[.namespace]>
# Retrieve endpoint summary for endpoint with port 9080.
istioctl proxy-config endpoint <pod-name[.namespace]> --port 9080
# Retrieve full endpoint with an address (172.17.0.2).
istioctl proxy-config endpoint <pod-name[.namespace]> --address 172.17.0.2 -o json
# Retrieve full endpoint with a cluster name (outbound|9411||zipkin.istio-system.svc.cluster.local).
istioctl proxy-config endpoint <pod-name[.namespace]> --cluster "outbound|9411||zipkin.istio-system.svc.cluster.local" -o json
# Retrieve full endpoint with the status (healthy).
istioctl proxy-config endpoint <pod-name[.namespace]> --status healthy -o json
# Retrieve endpoint summary without using Kubernetes API
ssh <user@hostname> 'curl localhost:15000/clusters?format=json' > envoy-clusters.json
istioctl proxy-config endpoints --file envoy-clusters.json
`,
Aliases: []string{"endpoints", "ep"},
Args: func(cmd *cobra.Command, args []string) error {
if (len(args) == 1) != (configDumpFile == "") {
cmd.Println(cmd.UsageString())
return fmt.Errorf("endpoints requires pod name or --file parameter")
}
return nil
},
RunE: func(c *cobra.Command, args []string) error {
var configWriter *clusters.ConfigWriter
var err error
if len(args) == 1 {
podName, ns := handlers.InferPodInfo(args[0], handlers.HandleNamespace(namespace, defaultNamespace))
configWriter, err = setupPodClustersWriter(podName, ns, c.OutOrStdout())
} else {
configWriter, err = setupFileClustersWriter(configDumpFile, c.OutOrStdout())
}
if err != nil {
return err
}
filter := clusters.EndpointFilter{
Address: address,
Port: uint32(port),
Cluster: clusterName,
Status: status,
}
switch outputFormat {
case summaryOutput:
return configWriter.PrintEndpointsSummary(filter)
case jsonOutput:
return configWriter.PrintEndpoints(filter)
default:
return fmt.Errorf("output format %q not supported", outputFormat)
}
},
}
endpointConfigCmd.PersistentFlags().StringVarP(&outputFormat, "output", "o", summaryOutput, "Output format: one of json|short")
endpointConfigCmd.PersistentFlags().StringVar(&address, "address", "", "Filter endpoints by address field")
endpointConfigCmd.PersistentFlags().IntVar(&port, "port", 0, "Filter endpoints by Port field")
endpointConfigCmd.PersistentFlags().StringVar(&clusterName, "cluster", "", "Filter endpoints by cluster name field")
endpointConfigCmd.PersistentFlags().StringVar(&status, "status", "", "Filter endpoints by status field")
endpointConfigCmd.PersistentFlags().StringVarP(&configDumpFile, "file", "f", "",
"Envoy config dump JSON file")
return endpointConfigCmd
}
func bootstrapConfigCmd() *cobra.Command {
bootstrapConfigCmd := &cobra.Command{
Use: "bootstrap [<pod-name[.namespace]>]",
Short: "Retrieves bootstrap configuration for the Envoy in the specified pod",
Long: `Retrieve information about bootstrap configuration for the Envoy instance in the specified pod.`,
Example: ` # Retrieve full bootstrap configuration for a given pod from Envoy.
istioctl proxy-config bootstrap <pod-name[.namespace]>
# Retrieve full bootstrap without using Kubernetes API
ssh <user@hostname> 'curl localhost:15000/config_dump' > envoy-config.json
istioctl proxy-config bootstrap --file envoy-config.json
`,
Aliases: []string{"b"},
Args: func(cmd *cobra.Command, args []string) error {
if (len(args) == 1) != (configDumpFile == "") {
cmd.Println(cmd.UsageString())
return fmt.Errorf("bootstrap requires pod name or --file parameter")
}
return nil
},
RunE: func(c *cobra.Command, args []string) error {
var configWriter *configdump.ConfigWriter
var err error
if len(args) == 1 {
podName, ns := handlers.InferPodInfo(args[0], handlers.HandleNamespace(namespace, defaultNamespace))
configWriter, err = setupPodConfigdumpWriter(podName, ns, c.OutOrStdout())
} else {
configWriter, err = setupFileConfigdumpWriter(configDumpFile, c.OutOrStdout())
}
if err != nil {
return err
}
return configWriter.PrintBootstrapDump()
},
}
bootstrapConfigCmd.PersistentFlags().StringVarP(&configDumpFile, "file", "f", "",
"Envoy config dump JSON file")
return bootstrapConfigCmd
}
func secretConfigCmd() *cobra.Command {
secretConfigCmd := &cobra.Command{
Use: "secret [<pod-name[.namespace]>]",
Short: "(experimental) Retrieves secret configuration for the Envoy in the specified pod",
Long: `(experimental) Retrieve information about secret configuration for the Envoy instance in the specified pod.`,
Example: ` # Retrieve full secret configuration for a given pod from Envoy.
istioctl proxy-config secret <pod-name[.namespace]>
# Retrieve full bootstrap without using Kubernetes API
ssh <user@hostname> 'curl localhost:15000/config_dump' > envoy-config.json
istioctl proxy-config secret --file envoy-config.json
THIS COMMAND IS STILL UNDER ACTIVE DEVELOPMENT AND NOT READY FOR PRODUCTION USE.
`,
Aliases: []string{"s"},
Args: func(cmd *cobra.Command, args []string) error {
if (len(args) == 1) != (configDumpFile == "") {
cmd.Println(cmd.UsageString())
return fmt.Errorf("secret requires pod name or --file parameter")
}
return nil
},
RunE: func(c *cobra.Command, args []string) error {
var configWriter *configdump.ConfigWriter
var err error
if len(args) == 1 {
podName, ns := handlers.InferPodInfo(args[0], handlers.HandleNamespace(namespace, defaultNamespace))
configWriter, err = setupPodConfigdumpWriter(podName, ns, c.OutOrStdout())
} else {
configWriter, err = setupFileConfigdumpWriter(configDumpFile, c.OutOrStdout())
}
if err != nil {
return err
}
switch outputFormat {
case summaryOutput:
return configWriter.PrintSecretSummary()
case jsonOutput:
return configWriter.PrintSecretDump()
default:
return fmt.Errorf("output format %q not supported", outputFormat)
}
},
}
secretConfigCmd.PersistentFlags().StringVarP(&outputFormat, "output", "o", summaryOutput, "Output format: one of json|short")
secretConfigCmd.PersistentFlags().StringVarP(&configDumpFile, "file", "f", "",
"Envoy config dump JSON file")
return secretConfigCmd
}
func proxyConfig() *cobra.Command {
configCmd := &cobra.Command{
Use: "proxy-config",
Short: "Retrieve information about proxy configuration from Envoy [kube only]",
Long: `A group of commands used to retrieve information about proxy configuration from the Envoy config dump`,
Example: ` # Retrieve information about proxy configuration from an Envoy instance.
istioctl proxy-config <clusters|listeners|routes|endpoints|bootstrap> <pod-name[.namespace]>`,
Aliases: []string{"pc"},
}
configCmd.PersistentFlags().StringVarP(&outputFormat, "output", "o", summaryOutput, "Output format: one of json|short")
configCmd.AddCommand(clusterConfigCmd())
configCmd.AddCommand(listenerConfigCmd())
configCmd.AddCommand(logCmd())
configCmd.AddCommand(routeConfigCmd())
configCmd.AddCommand(bootstrapConfigCmd())
configCmd.AddCommand(endpointConfigCmd())
configCmd.AddCommand(secretConfigCmd())
return configCmd
}
| {
file, err := os.Open(filename)
if err != nil {
return nil, err
}
defer func() {
if err := file.Close(); err != nil {
log.Errorf("failed to close %s: %s", filename, err)
}
}()
data, err := ioutil.ReadAll(file)
if err != nil {
return nil, err
}
return setupClustersEnvoyConfigWriter(data, out)
} |
custom_build.rs | use cargo_platform::Cfg;
use std::collections::hash_map::{Entry, HashMap};
use std::collections::{BTreeSet, HashSet};
use std::path::{Path, PathBuf};
use std::str;
use std::sync::Arc;
use crate::core::compiler::job_queue::JobState;
use crate::core::PackageId;
use crate::util::errors::{CargoResult, CargoResultExt};
use crate::util::machine_message::{self, Message};
use crate::util::{self, internal, paths, profile};
use super::job::{Freshness, Job, Work};
use super::{fingerprint, Context, Kind, Unit};
/// Contains the parsed output of a custom build script.
#[derive(Clone, Debug, Hash)]
pub struct BuildOutput {
/// Paths to pass to rustc with the `-L` flag.
pub library_paths: Vec<PathBuf>,
/// Names and link kinds of libraries, suitable for the `-l` flag.
pub library_links: Vec<String>,
/// Linker arguments suitable to be passed to `-C link-arg=<args>`
pub linker_args: Vec<String>,
/// Various `--cfg` flags to pass to the compiler.
pub cfgs: Vec<String>,
/// Additional environment variables to run the compiler with.
pub env: Vec<(String, String)>,
/// Metadata to pass to the immediate dependencies.
pub metadata: Vec<(String, String)>,
/// Paths to trigger a rerun of this build script.
/// May be absolute or relative paths (relative to package root).
pub rerun_if_changed: Vec<PathBuf>,
/// Environment variables which, when changed, will cause a rebuild.
pub rerun_if_env_changed: Vec<String>,
/// Warnings generated by this build.
pub warnings: Vec<String>,
}
/// Map of packages to build script output.
///
/// This initially starts out as empty. Overridden build scripts get
/// inserted during `build_map`. The rest of the entries are added
/// immediately after each build script runs.
pub type BuildScriptOutputs = HashMap<(PackageId, Kind), BuildOutput>;
/// Linking information for a `Unit`.
///
/// See `build_map` for more details.
#[derive(Default)]
pub struct BuildScripts {
/// Cargo will use this `to_link` vector to add `-L` flags to compiles as we
/// propagate them upwards towards the final build. Note, however, that we
/// need to preserve the ordering of `to_link` to be topologically sorted.
/// This will ensure that build scripts which print their paths properly will
/// correctly pick up the files they generated (if there are duplicates
/// elsewhere).
///
/// To preserve this ordering, the (id, kind) is stored in two places, once
/// in the `Vec` and once in `seen_to_link` for a fast lookup. We maintain
/// this as we're building interactively below to ensure that the memory
/// usage here doesn't blow up too much.
///
/// For more information, see #2354.
pub to_link: Vec<(PackageId, Kind)>,
/// This is only used while constructing `to_link` to avoid duplicates.
seen_to_link: HashSet<(PackageId, Kind)>,
/// Host-only dependencies that have build scripts.
///
/// This is the set of transitive dependencies that are host-only
/// (proc-macro, plugin, build-dependency) that contain a build script.
/// Any `BuildOutput::library_paths` path relative to `target` will be
/// added to LD_LIBRARY_PATH so that the compiler can find any dynamic
/// libraries a build script may have generated.
pub plugins: BTreeSet<PackageId>,
}
/// Dependency information as declared by a build script.
#[derive(Debug)]
pub struct BuildDeps {
/// Absolute path to the file in the target directory that stores the
/// output of the build script.
pub build_script_output: PathBuf,
/// Files that trigger a rebuild if they change.
pub rerun_if_changed: Vec<PathBuf>,
/// Environment variables that trigger a rebuild if they change.
pub rerun_if_env_changed: Vec<String>,
}
/// Prepares a `Work` that executes the target as a custom build script.
pub fn prepare<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoResult<Job> {
let _p = profile::start(format!(
"build script prepare: {}/{}",
unit.pkg,
unit.target.name()
));
let key = (unit.pkg.package_id(), unit.kind);
if cx.build_script_outputs.lock().unwrap().contains_key(&key) {
// The output is already set, thus the build script is overridden.
fingerprint::prepare_target(cx, unit, false)
} else {
build_work(cx, unit)
}
}
fn emit_build_output(state: &JobState<'_>, output: &BuildOutput, package_id: PackageId) {
let library_paths = output
.library_paths
.iter()
.map(|l| l.display().to_string())
.collect::<Vec<_>>();
let msg = machine_message::BuildScript {
package_id,
linked_libs: &output.library_links,
linked_paths: &library_paths,
cfgs: &output.cfgs,
env: &output.env,
}
.to_json_string();
state.stdout(msg);
}
fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoResult<Job> {
assert!(unit.mode.is_run_custom_build());
let bcx = &cx.bcx;
let dependencies = cx.dep_targets(unit);
let build_script_unit = dependencies
.iter()
.find(|d| !d.mode.is_run_custom_build() && d.target.is_custom_build())
.expect("running a script not depending on an actual script");
let script_dir = cx.files().build_script_dir(build_script_unit);
let script_out_dir = cx.files().build_script_out_dir(unit);
let script_run_dir = cx.files().build_script_run_dir(unit);
let build_plan = bcx.build_config.build_plan;
let invocation_name = unit.buildkey();
if let Some(deps) = unit.pkg.manifest().metabuild() {
prepare_metabuild(cx, build_script_unit, deps)?;
}
// Building the command to execute
let to_exec = script_dir.join(unit.target.name());
// Start preparing the process to execute, starting out with some
// environment variables. Note that the profile-related environment
// variables are not set from the build script's profile but rather the
// package's library profile.
// NOTE: if you add any profile flags, be sure to update
// `Profiles::get_profile_run_custom_build` so that those flags get
// carried over.
let to_exec = to_exec.into_os_string();
let mut cmd = cx.compilation.host_process(to_exec, unit.pkg)?;
let debug = unit.profile.debuginfo.unwrap_or(0) != 0;
cmd.env("OUT_DIR", &script_out_dir)
.env("CARGO_MANIFEST_DIR", unit.pkg.root())
.env("NUM_JOBS", &bcx.jobs().to_string())
.env(
"TARGET",
&match unit.kind {
Kind::Host => bcx.host_triple(),
Kind::Target => bcx.target_triple(),
},
)
.env("DEBUG", debug.to_string())
.env("OPT_LEVEL", &unit.profile.opt_level.to_string())
.env(
"PROFILE",
if bcx.build_config.release {
"release"
} else {
"debug"
},
)
.env("HOST", &bcx.host_triple())
.env("RUSTC", &bcx.rustc.path)
.env("RUSTDOC", &*bcx.config.rustdoc()?)
.inherit_jobserver(&cx.jobserver);
if let Some(ref linker) = bcx.target_config.linker {
cmd.env("RUSTC_LINKER", linker);
}
if let Some(links) = unit.pkg.manifest().links() {
cmd.env("CARGO_MANIFEST_LINKS", links);
}
// Be sure to pass along all enabled features for this package, this is the
// last piece of statically known information that we have.
for feat in &unit.features {
cmd.env(&format!("CARGO_FEATURE_{}", super::envify(feat)), "1");
}
let mut cfg_map = HashMap::new();
for cfg in bcx.cfg(unit.kind) {
match *cfg {
Cfg::Name(ref n) => {
cfg_map.insert(n.clone(), None);
}
Cfg::KeyPair(ref k, ref v) => {
if let Some(ref mut values) =
*cfg_map.entry(k.clone()).or_insert_with(|| Some(Vec::new()))
{
values.push(v.clone())
}
}
}
}
for (k, v) in cfg_map {
let k = format!("CARGO_CFG_{}", super::envify(&k));
match v {
Some(list) => {
cmd.env(&k, list.join(","));
}
None => {
cmd.env(&k, "");
}
}
}
// Gather the set of native dependencies that this package has along with
// some other variables to close over.
//
// This information will be used at build-time later on to figure out which
// sorts of variables need to be discovered at that time.
let lib_deps = {
dependencies
.iter()
.filter_map(|unit| {
if unit.mode.is_run_custom_build() {
Some((
unit.pkg.manifest().links().unwrap().to_string(),
unit.pkg.package_id(),
))
} else {
None
}
})
.collect::<Vec<_>>()
};
let pkg_name = unit.pkg.to_string();
let build_script_outputs = Arc::clone(&cx.build_script_outputs);
let id = unit.pkg.package_id();
let output_file = script_run_dir.join("output");
let err_file = script_run_dir.join("stderr");
let root_output_file = script_run_dir.join("root-output");
let host_target_root = cx.files().host_root().to_path_buf();
let all = (
id,
pkg_name.clone(),
Arc::clone(&build_script_outputs),
output_file.clone(),
script_out_dir.clone(),
);
let build_scripts = cx.build_scripts.get(unit).cloned();
let kind = unit.kind;
let json_messages = bcx.build_config.emit_json();
let extra_verbose = bcx.config.extra_verbose();
let (prev_output, prev_script_out_dir) = prev_build_output(cx, unit);
paths::create_dir_all(&script_dir)?;
paths::create_dir_all(&script_out_dir)?;
// Prepare the unit of "dirty work" which will actually run the custom build
// command.
//
// Note that this has to do some extra work just before running the command
// to determine extra environment variables and such.
let dirty = Work::new(move |state| {
// Make sure that OUT_DIR exists.
//
// If we have an old build directory, then just move it into place,
// otherwise create it!
paths::create_dir_all(&script_out_dir).chain_err(|| {
internal(
"failed to create script output directory for \
build command",
)
})?;
// For all our native lib dependencies, pick up their metadata to pass
// along to this custom build command. We're also careful to augment our
// dynamic library search path in case the build script depended on any
// native dynamic libraries.
if !build_plan {
let build_script_outputs = build_script_outputs.lock().unwrap();
for (name, id) in lib_deps {
let key = (id, kind);
let script_output = build_script_outputs.get(&key).ok_or_else(|| {
internal(format!(
"failed to locate build state for env \
vars: {}/{:?}",
id, kind
))
})?;
let data = &script_output.metadata;
for &(ref key, ref value) in data.iter() {
cmd.env(
&format!("DEP_{}_{}", super::envify(&name), super::envify(key)),
value,
);
}
}
if let Some(build_scripts) = build_scripts {
super::add_plugin_deps(
&mut cmd,
&build_script_outputs,
&build_scripts,
&host_target_root,
)?;
}
}
if build_plan {
state.build_plan(invocation_name, cmd.clone(), Arc::new(Vec::new()));
return Ok(());
}
// And now finally, run the build command itself!
state.running(&cmd);
let timestamp = paths::set_invocation_time(&script_run_dir)?;
let prefix = format!("[{} {}] ", id.name(), id.version());
let output = cmd
.exec_with_streaming(
&mut |stdout| {
if extra_verbose {
state.stdout(format!("{}{}", prefix, stdout));
}
Ok(())
},
&mut |stderr| {
if extra_verbose {
state.stderr(format!("{}{}", prefix, stderr));
}
Ok(())
},
true,
)
.chain_err(|| format!("failed to run custom build command for `{}`", pkg_name))?;
// After the build command has finished running, we need to be sure to
// remember all of its output so we can later discover precisely what it
// was, even if we don't run the build command again (due to freshness).
//
// This is also the location where we provide feedback into the build
// state informing what variables were discovered via our script as
// well.
paths::write(&output_file, &output.stdout)?;
filetime::set_file_times(output_file, timestamp, timestamp)?;
paths::write(&err_file, &output.stderr)?;
paths::write(&root_output_file, util::path2bytes(&script_out_dir)?)?;
let parsed_output =
BuildOutput::parse(&output.stdout, &pkg_name, &script_out_dir, &script_out_dir)?;
if json_messages {
emit_build_output(state, &parsed_output, id);
}
build_script_outputs
.lock()
.unwrap()
.insert((id, kind), parsed_output);
Ok(())
});
// Now that we've prepared our work-to-do, we need to prepare the fresh work
// itself to run when we actually end up just discarding what we calculated
// above.
let fresh = Work::new(move |state| {
let (id, pkg_name, build_script_outputs, output_file, script_out_dir) = all;
let output = match prev_output {
Some(output) => output,
None => BuildOutput::parse_file(
&output_file,
&pkg_name,
&prev_script_out_dir,
&script_out_dir,
)?,
};
if json_messages {
emit_build_output(state, &output, id);
}
build_script_outputs
.lock()
.unwrap()
.insert((id, kind), output);
Ok(())
});
let mut job = if cx.bcx.build_config.build_plan { | };
if job.freshness() == Freshness::Dirty {
job.before(dirty);
} else {
job.before(fresh);
}
Ok(job)
}
impl BuildOutput {
pub fn parse_file(
path: &Path,
pkg_name: &str,
script_out_dir_when_generated: &Path,
script_out_dir: &Path,
) -> CargoResult<BuildOutput> {
let contents = paths::read_bytes(path)?;
BuildOutput::parse(
&contents,
pkg_name,
script_out_dir_when_generated,
script_out_dir,
)
}
// Parses the output of a script.
// The `pkg_name` is used for error messages.
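// Recognized lines have the form `cargo:KEY=VALUE`, for example
// `cargo:rustc-link-lib=static=foo` or `cargo:rerun-if-changed=build.rs`;
// lines not starting with `cargo:` are ignored.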
pub fn parse(
input: &[u8],
pkg_name: &str,
script_out_dir_when_generated: &Path,
script_out_dir: &Path,
) -> CargoResult<BuildOutput> {
let mut library_paths = Vec::new();
let mut library_links = Vec::new();
let mut linker_args = Vec::new();
let mut cfgs = Vec::new();
let mut env = Vec::new();
let mut metadata = Vec::new();
let mut rerun_if_changed = Vec::new();
let mut rerun_if_env_changed = Vec::new();
let mut warnings = Vec::new();
let whence = format!("build script of `{}`", pkg_name);
for line in input.split(|b| *b == b'\n') {
let line = match str::from_utf8(line) {
Ok(line) => line.trim(),
Err(..) => continue,
};
let mut iter = line.splitn(2, ':');
if iter.next() != Some("cargo") {
// skip this line since it doesn't start with "cargo:"
continue;
}
let data = match iter.next() {
Some(val) => val,
None => continue,
};
// getting the `key=value` part of the line
let mut iter = data.splitn(2, '=');
let key = iter.next();
let value = iter.next();
let (key, value) = match (key, value) {
(Some(a), Some(b)) => (a, b.trim_end()),
// Line started with `cargo:` but didn't match `key=value`.
_ => failure::bail!("Wrong output in {}: `{}`", whence, line),
};
// This will rewrite paths if the target directory has been moved.
let value = value.replace(
script_out_dir_when_generated.to_str().unwrap(),
script_out_dir.to_str().unwrap(),
);
// Keep in sync with TargetConfig::new.
match key {
"rustc-flags" => {
let (paths, links) = BuildOutput::parse_rustc_flags(&value, &whence)?;
library_links.extend(links.into_iter());
library_paths.extend(paths.into_iter());
}
"rustc-link-lib" => library_links.push(value.to_string()),
"rustc-link-search" => library_paths.push(PathBuf::from(value)),
"rustc-cdylib-link-arg" => linker_args.push(value.to_string()),
"rustc-cfg" => cfgs.push(value.to_string()),
"rustc-env" => env.push(BuildOutput::parse_rustc_env(&value, &whence)?),
"warning" => warnings.push(value.to_string()),
"rerun-if-changed" => rerun_if_changed.push(PathBuf::from(value)),
"rerun-if-env-changed" => rerun_if_env_changed.push(value.to_string()),
_ => metadata.push((key.to_string(), value.to_string())),
}
}
Ok(BuildOutput {
library_paths,
library_links,
linker_args,
cfgs,
env,
metadata,
rerun_if_changed,
rerun_if_env_changed,
warnings,
})
}
pub fn parse_rustc_flags(
value: &str,
whence: &str,
) -> CargoResult<(Vec<PathBuf>, Vec<String>)> {
let value = value.trim();
let mut flags_iter = value
.split(|c: char| c.is_whitespace())
.filter(|w| w.chars().any(|c| !c.is_whitespace()));
let (mut library_paths, mut library_links) = (Vec::new(), Vec::new());
while let Some(flag) = flags_iter.next() {
if flag.starts_with("-l") || flag.starts_with("-L") {
// Check if this flag has no space before the value as is
// common with tools like pkg-config
// e.g. -L/some/dir/local/lib or -licui18n
let (flag, mut value) = flag.split_at(2);
if value.is_empty() {
value = match flags_iter.next() {
Some(v) => v,
None => failure::bail! {
"Flag in rustc-flags has no value in {}: {}",
whence,
value
},
}
}
match flag {
"-l" => library_links.push(value.to_string()),
"-L" => library_paths.push(PathBuf::from(value)),
// This was already checked above
_ => unreachable!(),
};
} else {
failure::bail!(
"Only `-l` and `-L` flags are allowed in {}: `{}`",
whence,
value
)
}
}
Ok((library_paths, library_links))
}
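// Parses the `KEY=VALUE` payload of a `cargo:rustc-env=KEY=VALUE` directive;
// e.g. "FOO=bar" yields ("FOO", "bar").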
pub fn parse_rustc_env(value: &str, whence: &str) -> CargoResult<(String, String)> {
let mut iter = value.splitn(2, '=');
let name = iter.next();
let val = iter.next();
match (name, val) {
(Some(n), Some(v)) => Ok((n.to_owned(), v.to_owned())),
_ => failure::bail!("Variable rustc-env has no value in {}: {}", whence, value),
}
}
}
fn prepare_metabuild<'a, 'cfg>(
cx: &Context<'a, 'cfg>,
unit: &Unit<'a>,
deps: &[String],
) -> CargoResult<()> {
let mut output = Vec::new();
let available_deps = cx.dep_targets(unit);
// Filter out optional dependencies, and look up the actual lib name.
let meta_deps: Vec<_> = deps
.iter()
.filter_map(|name| {
available_deps
.iter()
.find(|u| u.pkg.name().as_str() == name.as_str())
.map(|dep| dep.target.crate_name())
})
.collect();
for dep in &meta_deps {
output.push(format!("use {};\n", dep));
}
output.push("fn main() {\n".to_string());
for dep in &meta_deps {
output.push(format!(" {}::metabuild();\n", dep));
}
output.push("}\n".to_string());
let output = output.join("");
let path = unit.pkg.manifest().metabuild_path(cx.bcx.ws.target_dir());
paths::create_dir_all(path.parent().unwrap())?;
paths::write_if_changed(path, &output)?;
Ok(())
}
impl BuildDeps {
pub fn new(output_file: &Path, output: Option<&BuildOutput>) -> BuildDeps {
BuildDeps {
build_script_output: output_file.to_path_buf(),
rerun_if_changed: output
.map(|p| &p.rerun_if_changed)
.cloned()
.unwrap_or_default(),
rerun_if_env_changed: output
.map(|p| &p.rerun_if_env_changed)
.cloned()
.unwrap_or_default(),
}
}
}
/// Computes several maps in `Context`:
/// - `build_scripts`: A map that tracks which build scripts each package
/// depends on.
/// - `build_explicit_deps`: Dependency statements emitted by build scripts
/// from a previous run.
/// - `build_script_outputs`: Pre-populates this with any overridden build
/// scripts.
///
/// The important one here is `build_scripts`, which for each `(package,
/// kind)` stores a `BuildScripts` object which contains a list of
/// dependencies with build scripts that the unit should consider when
/// linking. For example this lists all dependencies' `-L` flags which need to
/// be propagated transitively.
///
/// The given set of units to this function is the initial set of
/// targets/profiles which are being built.
pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>, units: &[Unit<'b>]) -> CargoResult<()> {
let mut ret = HashMap::new();
for unit in units {
build(&mut ret, cx, unit)?;
}
cx.build_scripts
.extend(ret.into_iter().map(|(k, v)| (k, Arc::new(v))));
return Ok(());
// Recursive function to build up the map we're constructing. This function
// memoizes all of its return values as it goes along.
fn build<'a, 'b, 'cfg>(
out: &'a mut HashMap<Unit<'b>, BuildScripts>,
cx: &mut Context<'b, 'cfg>,
unit: &Unit<'b>,
) -> CargoResult<&'a BuildScripts> {
// Do a quick pre-flight check to see if we've already calculated the
// set of dependencies.
if out.contains_key(unit) {
return Ok(&out[unit]);
}
// If there is a build script override, pre-fill the build output.
if let Some(links) = unit.pkg.manifest().links() {
if let Some(output) = cx.bcx.script_override(links, unit.kind) {
let key = (unit.pkg.package_id(), unit.kind);
cx.build_script_outputs
.lock()
.unwrap()
.insert(key, output.clone());
}
}
let mut ret = BuildScripts::default();
if !unit.target.is_custom_build() && unit.pkg.has_custom_build() {
add_to_link(&mut ret, unit.pkg.package_id(), unit.kind);
}
// Load any dependency declarations from a previous run.
if unit.mode.is_run_custom_build() {
parse_previous_explicit_deps(cx, unit)?;
}
// We want to invoke the compiler deterministically to be cache-friendly
// to rustc invocation caching schemes, so be sure to generate the same
// set of build script dependency orderings via sorting the targets that
// come out of the `Context`.
let mut dependencies = cx.dep_targets(unit);
dependencies.sort_by_key(|u| u.pkg.package_id());
for dep_unit in dependencies.iter() {
let dep_scripts = build(out, cx, dep_unit)?;
if dep_unit.target.for_host() {
ret.plugins
.extend(dep_scripts.to_link.iter().map(|p| &p.0).cloned());
} else if dep_unit.target.linkable() {
for &(pkg, kind) in dep_scripts.to_link.iter() {
add_to_link(&mut ret, pkg, kind);
}
}
}
match out.entry(*unit) {
Entry::Vacant(entry) => Ok(entry.insert(ret)),
Entry::Occupied(_) => panic!("cyclic dependencies in `build_map`"),
}
}
// When adding an entry to 'to_link' we only actually push it on if the
// script hasn't seen it yet (e.g., we don't push on duplicates).
fn add_to_link(scripts: &mut BuildScripts, pkg: PackageId, kind: Kind) {
if scripts.seen_to_link.insert((pkg, kind)) {
scripts.to_link.push((pkg, kind));
}
}
fn parse_previous_explicit_deps<'a, 'cfg>(
cx: &mut Context<'a, 'cfg>,
unit: &Unit<'a>,
) -> CargoResult<()> {
let script_run_dir = cx.files().build_script_run_dir(unit);
let output_file = script_run_dir.join("output");
let (prev_output, _) = prev_build_output(cx, unit);
let deps = BuildDeps::new(&output_file, prev_output.as_ref());
cx.build_explicit_deps.insert(*unit, deps);
Ok(())
}
}
/// Returns the previous parsed `BuildOutput`, if any, from a previous
/// execution.
///
/// Also returns the directory containing the output, typically used later in
/// processing.
fn prev_build_output<'a, 'cfg>(
cx: &mut Context<'a, 'cfg>,
unit: &Unit<'a>,
) -> (Option<BuildOutput>, PathBuf) {
let script_out_dir = cx.files().build_script_out_dir(unit);
let script_run_dir = cx.files().build_script_run_dir(unit);
let root_output_file = script_run_dir.join("root-output");
let output_file = script_run_dir.join("output");
let prev_script_out_dir = paths::read_bytes(&root_output_file)
.and_then(|bytes| util::bytes2path(&bytes))
.unwrap_or_else(|_| script_out_dir.clone());
(
BuildOutput::parse_file(
&output_file,
&unit.pkg.to_string(),
&prev_script_out_dir,
&script_out_dir,
)
.ok(),
prev_script_out_dir,
)
} | Job::new(Work::noop(), Freshness::Dirty)
} else {
fingerprint::prepare_target(cx, unit, false)? |
forgot-password.ts | import { Component } from '@angular/core';
import { IonicPage, NavController, NavParams } from 'ionic-angular';
import { UserOfMine } from '../../model/user.interface';
import { LoginPage } from '../login/login';
import * as firebase from 'firebase/app';
@Component({
selector: 'page-forgot-password',
templateUrl: 'forgot-password.html',
})
export class | {
userOfMine = {} as UserOfMine;
constructor(
public navCtrl: NavController,
public navParams: NavParams
) {
}
resetPassword() {
const auth = firebase.auth();
if (this.userOfMine.email == null) {
return;
} else {
return auth.sendPasswordResetEmail(this.userOfMine.email)
.then(() => this.navCtrl.setRoot(LoginPage))
.catch((error) => console.log('password reset failed', error));
}
}
}
| ForgotPasswordPage |
Consent.model.ts | import { Record } from './RecordWithLocation.model';
export class Consent extends Record {
public understoodSheet = true;
public questionsOpportunity = true;
public questionsAnswered = true;
public understandWithdrawal = true;
public understandCoding = true;
public secondary = {
agreeArchiving: true,
awareRisks: true,
agreeTakePart: true,
};
public photography = {
agreePhotoTaken: true,
agreePhotoPublished: true,
agreePhotoFutureUse: true,
};
public name: string;
public date = new Date();
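// JSON round-trips store Date values as ISO strings, so deserialize() revives
// the `date` field into a real Date after parsing.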
public static deserialize(serializedConsent: string): Consent {
const consent = JSON.parse(serializedConsent);
consent.date = new Date(consent.date);
return consent as Consent;
}
constructor() {
super();
}
public isComplete(): boolean {
return (
this.understoodSheet && | this.secondary.agreeArchiving &&
this.secondary.awareRisks &&
this.secondary.agreeTakePart &&
!!this.name &&
this.date != null
);
}
public getDateString(format: 'ISO' | 'local' = 'ISO'): string {
return Consent.dateToString(this.date, format);
}
public serialize(): string {
return JSON.stringify(this);
}
} | this.questionsOpportunity &&
this.questionsAnswered &&
this.understandWithdrawal &&
this.understandCoding && |
bad_execution_code.py | do_you_know_this_function() |
||
eval.js | 'use strict';
function | (node) {
const op = node.operator;
const left = evaluateConst(node.left);
const right = evaluateConst(node.right);
if (op === '+')
return left + right;
throw new Error(`Unsupported binary operation: "${op}"`);
}
function evaluateConst(node) {
if (node.type === 'Literal')
return node.value;
if (node.type === 'BinaryExpression')
return evaluateBinary(node);
throw new Error(`Unsupported node type: "${node.type}"`);
}
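// Usage sketch (hypothetical ESTree-style AST, e.g. produced by a parser):
//
// evaluateConst({
// type: 'BinaryExpression', operator: '+',
// left: { type: 'Literal', value: 1 },
// right: { type: 'Literal', value: 2 },
// }); // => 3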
module.exports = evaluateConst;
| evaluateBinary |
files_remote.rs | use anyhow::Result;
use crate::Client;
pub struct FilesRemote {
pub client: Client,
}
impl FilesRemote {
#[doc(hidden)]
pub fn new(client: Client) -> Self {
FilesRemote { client }
}
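// Usage sketch (assumes an already-authenticated `Client`; the file ID is
// hypothetical):
//
// let files = FilesRemote::new(client);
// let info = files.info("F12345678", "").await?;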
/**
* This function performs a `POST` to the `/files.remote.add` endpoint.
*
* Adds a file from a remote service
*
* FROM: <https://api.slack.com/methods/files.remote.add>
*/
pub async fn add(&self) -> Result<crate::types::DndEndSchema> {
let url = "/files.remote.add".to_string();
self.client.post(&url, None).await
}
/**
* This function performs a `GET` to the `/files.remote.info` endpoint.
*
* Retrieve information about a remote file added to Slack | * **Parameters:**
*
* * `token: &str` -- Authentication token. Requires scope: `remote_files:read`.
* * `file: &str` -- Specify a file by providing its ID.
* * `external_id: &str` -- Creator defined GUID for the file.
*/
pub async fn info(&self, file: &str, external_id: &str) -> Result<crate::types::DndEndSchema> {
let mut query_args: Vec<(String, String)> = Default::default();
if !external_id.is_empty() {
query_args.push(("external_id".to_string(), external_id.to_string()));
}
if !file.is_empty() {
query_args.push(("file".to_string(), file.to_string()));
}
let query_ = serde_urlencoded::to_string(&query_args).unwrap();
let url = format!("/files.remote.info?{}", query_);
self.client.get(&url, None).await
}
/**
* This function performs a `GET` to the `/files.remote.list` endpoint.
*
* List remote files added to Slack.
*
* FROM: <https://api.slack.com/methods/files.remote.list>
*
* **Parameters:**
*
* * `token: &str` -- Authentication token. Requires scope: `remote_files:read`.
* * `channel: &str` -- Filter files appearing in a specific channel, indicated by its ID.
* * `ts_from: f64` -- Filter files created after this timestamp (inclusive).
* * `ts_to: f64` -- Filter files created before this timestamp (inclusive).
* * `limit: i64` -- The maximum number of items to return.
* * `cursor: &str` -- Paginate through collections of data by setting the `cursor` parameter to a `next_cursor` attribute returned by a previous request's `response_metadata`. Default value fetches the first "page" of the collection. See [pagination](/docs/pagination) for more detail.
*/
pub async fn list(
&self,
channel: &str,
ts_from: f64,
ts_to: f64,
limit: i64,
cursor: &str,
) -> Result<crate::types::DndEndSchema> {
let mut query_args: Vec<(String, String)> = Default::default();
if !channel.is_empty() {
query_args.push(("channel".to_string(), channel.to_string()));
}
if !cursor.is_empty() {
query_args.push(("cursor".to_string(), cursor.to_string()));
}
if limit > 0 {
query_args.push(("limit".to_string(), limit.to_string()));
}
if ts_from > 0.0 {
query_args.push(("ts_from".to_string(), ts_from.to_string()));
}
if ts_to > 0.0 {
query_args.push(("ts_to".to_string(), ts_to.to_string()));
}
let query_ = serde_urlencoded::to_string(&query_args).unwrap();
let url = format!("/files.remote.list?{}", query_);
self.client.get(&url, None).await
}
/**
* This function performs a `POST` to the `/files.remote.remove` endpoint.
*
* Remove a remote file.
*
* FROM: <https://api.slack.com/methods/files.remote.remove>
*/
pub async fn remove(&self) -> Result<crate::types::DndEndSchema> {
let url = "/files.remote.remove".to_string();
self.client.post(&url, None).await
}
/**
* This function performs a `GET` to the `/files.remote.share` endpoint.
*
* Share a remote file into a channel.
*
* FROM: <https://api.slack.com/methods/files.remote.share>
*
* **Parameters:**
*
* * `token: &str` -- Authentication token. Requires scope: `remote_files:share`.
* * `file: &str` -- Specify a file registered with Slack by providing its ID. Either this field or `external_id` or both are required.
* * `external_id: &str` -- The globally unique identifier (GUID) for the file, as set by the app registering the file with Slack. Either this field or `file` or both are required.
* * `channels: &str` -- Comma-separated list of channel IDs where the file will be shared.
*/
pub async fn share(
&self,
file: &str,
external_id: &str,
channels: &str,
) -> Result<crate::types::DndEndSchema> {
let mut query_args: Vec<(String, String)> = Default::default();
if !channels.is_empty() {
query_args.push(("channels".to_string(), channels.to_string()));
}
if !external_id.is_empty() {
query_args.push(("external_id".to_string(), external_id.to_string()));
}
if !file.is_empty() {
query_args.push(("file".to_string(), file.to_string()));
}
let query_ = serde_urlencoded::to_string(&query_args).unwrap();
let url = format!("/files.remote.share?{}", query_);
self.client.get(&url, None).await
}
/**
* This function performs a `POST` to the `/files.remote.update` endpoint.
*
* Updates an existing remote file.
*
* FROM: <https://api.slack.com/methods/files.remote.update>
*/
pub async fn update(&self) -> Result<crate::types::DndEndSchema> {
let url = "/files.remote.update".to_string();
self.client.post(&url, None).await
}
}
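// A minimal usage sketch for the remote-files endpoints above. The
// constructor name `Client::new_from_env` and the `files_remote()` accessor
// are assumptions for illustration and may not match this crate's real API:
//
// let client = Client::new_from_env();
// let files = client.files_remote();
// let _info = files.info("F0ABCDEF0", "").await?;
// files.share("F0ABCDEF0", "", "C012AB3CD").await?;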
host.rs
use anyhow::Result;
wit_bindgen_wasmtime::export!("./tests/runtime/numbers/imports.wit");
#[derive(Default)]
pub struct MyImports {
scalar: u32,
}
impl imports::Imports for MyImports {
    fn roundtrip_u8(&mut self, val: u8) -> u8 {
        val
    }
fn roundtrip_s8(&mut self, val: i8) -> i8 {
val
}
fn roundtrip_u16(&mut self, val: u16) -> u16 {
val
}
fn roundtrip_s16(&mut self, val: i16) -> i16 {
val
}
fn roundtrip_u32(&mut self, val: u32) -> u32 {
val
}
fn roundtrip_s32(&mut self, val: i32) -> i32 {
val
}
fn roundtrip_u64(&mut self, val: u64) -> u64 {
val
}
fn roundtrip_s64(&mut self, val: i64) -> i64 {
val
}
fn roundtrip_f32(&mut self, val: f32) -> f32 {
val
}
fn roundtrip_f64(&mut self, val: f64) -> f64 {
val
}
fn roundtrip_char(&mut self, val: char) -> char {
val
}
fn set_scalar(&mut self, val: u32) {
self.scalar = val;
}
fn get_scalar(&mut self) -> u32 {
self.scalar
}
}
wit_bindgen_wasmtime::import!("./tests/runtime/numbers/exports.wit");
fn run(wasm: &str) -> Result<()> {
let (exports, mut store) = crate::instantiate(
wasm,
|linker| imports::add_to_linker(linker, |cx| -> &mut MyImports { &mut cx.imports }),
|store, module, linker| {
exports::Exports::instantiate(store, module, linker, |cx| &mut cx.exports)
},
)?;
exports.test_imports(&mut store)?;
assert_eq!(exports.roundtrip_u8(&mut store, 1)?, 1);
assert_eq!(
exports.roundtrip_u8(&mut store, u8::min_value())?,
u8::min_value()
);
assert_eq!(
exports.roundtrip_u8(&mut store, u8::max_value())?,
u8::max_value()
);
assert_eq!(exports.roundtrip_s8(&mut store, 1)?, 1);
assert_eq!(
exports.roundtrip_s8(&mut store, i8::min_value())?,
i8::min_value()
);
assert_eq!(
exports.roundtrip_s8(&mut store, i8::max_value())?,
i8::max_value()
);
assert_eq!(exports.roundtrip_u16(&mut store, 1)?, 1);
assert_eq!(
exports.roundtrip_u16(&mut store, u16::min_value())?,
u16::min_value()
);
assert_eq!(
exports.roundtrip_u16(&mut store, u16::max_value())?,
u16::max_value()
);
assert_eq!(exports.roundtrip_s16(&mut store, 1)?, 1);
assert_eq!(
exports.roundtrip_s16(&mut store, i16::min_value())?,
i16::min_value()
);
assert_eq!(
exports.roundtrip_s16(&mut store, i16::max_value())?,
i16::max_value()
);
assert_eq!(exports.roundtrip_u32(&mut store, 1)?, 1);
assert_eq!(
exports.roundtrip_u32(&mut store, u32::min_value())?,
u32::min_value()
);
assert_eq!(
exports.roundtrip_u32(&mut store, u32::max_value())?,
u32::max_value()
);
assert_eq!(exports.roundtrip_s32(&mut store, 1)?, 1);
assert_eq!(
exports.roundtrip_s32(&mut store, i32::min_value())?,
i32::min_value()
);
assert_eq!(
exports.roundtrip_s32(&mut store, i32::max_value())?,
i32::max_value()
);
assert_eq!(exports.roundtrip_u64(&mut store, 1)?, 1);
assert_eq!(
exports.roundtrip_u64(&mut store, u64::min_value())?,
u64::min_value()
);
assert_eq!(
exports.roundtrip_u64(&mut store, u64::max_value())?,
u64::max_value()
);
assert_eq!(exports.roundtrip_s64(&mut store, 1)?, 1);
assert_eq!(
exports.roundtrip_s64(&mut store, i64::min_value())?,
i64::min_value()
);
assert_eq!(
exports.roundtrip_s64(&mut store, i64::max_value())?,
i64::max_value()
);
assert_eq!(exports.roundtrip_f32(&mut store, 1.0)?, 1.0);
assert_eq!(
exports.roundtrip_f32(&mut store, f32::INFINITY)?,
f32::INFINITY
);
assert_eq!(
exports.roundtrip_f32(&mut store, f32::NEG_INFINITY)?,
f32::NEG_INFINITY
);
assert!(exports.roundtrip_f32(&mut store, f32::NAN)?.is_nan());
assert_eq!(exports.roundtrip_f64(&mut store, 1.0)?, 1.0);
assert_eq!(
exports.roundtrip_f64(&mut store, f64::INFINITY)?,
f64::INFINITY
);
assert_eq!(
exports.roundtrip_f64(&mut store, f64::NEG_INFINITY)?,
f64::NEG_INFINITY
);
assert!(exports.roundtrip_f64(&mut store, f64::NAN)?.is_nan());
assert_eq!(exports.roundtrip_char(&mut store, 'a')?, 'a');
assert_eq!(exports.roundtrip_char(&mut store, ' ')?, ' ');
assert_eq!(exports.roundtrip_char(&mut store, '🚩')?, '🚩');
exports.set_scalar(&mut store, 2)?;
assert_eq!(exports.get_scalar(&mut store)?, 2);
exports.set_scalar(&mut store, 4)?;
assert_eq!(exports.get_scalar(&mut store)?, 4);
Ok(())
}
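// Illustrative invocation of the harness above; the wasm path is hypothetical
// and depends on how this test suite builds its guest modules:
//
// #[test]
// fn numbers() -> Result<()> {
//     run("./tests/runtime/numbers/numbers.wasm")
// }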
get_iot_hub_resource.py
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
__all__ = [
'GetIotHubResourceResult',
'AwaitableGetIotHubResourceResult',
'get_iot_hub_resource',
]
@pulumi.output_type
class GetIotHubResourceResult:
"""
The description of the IoT hub.
"""
def __init__(__self__, etag=None, id=None, location=None, name=None, properties=None, sku=None, tags=None, type=None):
if etag and not isinstance(etag, str):
raise TypeError("Expected argument 'etag' to be a str")
pulumi.set(__self__, "etag", etag)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if location and not isinstance(location, str):
raise TypeError("Expected argument 'location' to be a str")
pulumi.set(__self__, "location", location)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if properties and not isinstance(properties, dict):
raise TypeError("Expected argument 'properties' to be a dict")
pulumi.set(__self__, "properties", properties)
if sku and not isinstance(sku, dict):
raise TypeError("Expected argument 'sku' to be a dict")
pulumi.set(__self__, "sku", sku)
if tags and not isinstance(tags, dict):
raise TypeError("Expected argument 'tags' to be a dict")
pulumi.set(__self__, "tags", tags)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
@property
@pulumi.getter
def etag(self) -> Optional[str]:
"""
The Etag field is *not* required. If it is provided in the response body, it must also be provided as a header per the normal ETag convention.
"""
return pulumi.get(self, "etag")
@property
@pulumi.getter
def id(self) -> str:
"""
The resource identifier.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def location(self) -> str:
"""
The resource location.
"""
return pulumi.get(self, "location")
@property
@pulumi.getter
def name(self) -> str:
"""
The resource name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def properties(self) -> 'outputs.IotHubPropertiesResponse':
"""
IotHub properties
"""
return pulumi.get(self, "properties")
@property
@pulumi.getter
def sku(self) -> 'outputs.IotHubSkuInfoResponse':
"""
IotHub SKU info
"""
return pulumi.get(self, "sku")
@property
@pulumi.getter
def tags(self) -> Optional[Mapping[str, str]]:
"""
The resource tags.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter
def type(self) -> str:
"""
The resource type.
"""
return pulumi.get(self, "type")
class AwaitableGetIotHubResourceResult(GetIotHubResourceResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetIotHubResourceResult(
etag=self.etag,
id=self.id,
location=self.location,
name=self.name,
properties=self.properties,
sku=self.sku,
tags=self.tags,
type=self.type)
def get_iot_hub_resource(resource_group_name: Optional[str] = None,
resource_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetIotHubResourceResult:
"""
The description of the IoT hub.
:param str resource_group_name: The name of the resource group that contains the IoT hub.
:param str resource_name: The name of the IoT hub.
"""
__args__ = dict()
__args__['resourceGroupName'] = resource_group_name
__args__['resourceName'] = resource_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-native:devices/v20180401:getIotHubResource', __args__, opts=opts, typ=GetIotHubResourceResult).value
return AwaitableGetIotHubResourceResult(
etag=__ret__.etag,
id=__ret__.id,
location=__ret__.location,
name=__ret__.name,
properties=__ret__.properties,
sku=__ret__.sku,
tags=__ret__.tags,
type=__ret__.type)
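# Example usage (resource names are illustrative):
#
#     hub = get_iot_hub_resource(resource_group_name="my-rg", resource_name="my-hub")
#     pulumi.export("iot_hub_type", hub.type)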
server.js
const mysql = require("mysql");
const inquirer = require("inquirer");
const cTable = require("console.table")
const connection = mysql.createConnection({
host: "localhost",
// Your port; if not 3306
port: 8889,
// Your username
user: "root",
// Your password
password: "root",
database: "player_trackerDB"
});
connection.connect(function (err) {
if (err) throw err;
runTracker();
});
function runTracker() {
// console.clear();
inquirer
.prompt({
name: "action",
type: "list",
message: "Would you like to :",
choices: [
"View All Groups?",
"View All Players sorted by Position Group?",
"View All Players sorted by Position?",
"View All Players?",
"Add a Group?",
"Add a Player?",
"Update a Player's Position?",
"Quit.",
]
})
.then(function (answer) {
switch (answer.action) {
case "Add a Group?":
addGroup(); // Create a new Group (Coaches or Bench Riders)
break;
case "Add a Player?":
addPlayer(); // Prompt for the player's first name, last name and position, then insert the row
break;
case "View All Groups?":
viewGroups(); // list each position group
break;
case "View All Players sorted by Position Group?": // pull first_name last_name title and salary - done
viewPlayersInGroup();
break;
case "View All Players sorted by Position?": // pull first last department_name
viewPlayersInPosition();
break;
case "View All Players?": // pull all employees first_name last_name title salary department_name - done
viewPlayers();
break;
case "Update a Player's Position?": // Change a position for a player
updatePlayerPosition();
break;
case "Quit.": // End program
quit();
break;
};
});
// console.clear();
};
function viewGroups() {
// console.clear();
let query = "SELECT group_name FROM groups";
connection.query(query, function (err, res) {
let table = []
for (let i = 0; i < res.length; i++) {
table.push([res[i].group_name]);
};
console.table("");
console.table(["Position Groups"], table);
console.log("");
console.log("Press up or down arrow to continue");
runTracker();
});
};
function viewPlayers() {
// console.clear();
let query = "SELECT players.first_name, players.last_name, positions.title, positions.salary FROM players INNER JOIN positions ON players.role_id = positions.role_id;";
connection.query(query, function (err, res) {
let table = []
for (let i = 0; i < res.length; i++) {
table.push([res[i].first_name, res[i].last_name, res[i].title, res[i].salary]);
}
console.table("");
console.table(["First Name", "Last Name", "Position", "Salary"], table);
console.log("");
console.log("Press up or down arrow to continue");
runTracker();
})
};
function viewPlayersInGroup() {
// console.clear();
let query2 = "SELECT players.first_name, players.last_name, positions.title, positions.salary, groups.group_name FROM players LEFT JOIN positions ON players.role_id = positions.role_id LEFT JOIN groups ON positions.group_id = groups.group_id";
let table2 = []
connection.query(query2, function (err, res) {
for (let i = 0; i < res.length; i++) {
table2.push([res[i].first_name, res[i].last_name, res[i].title, res[i].salary, res[i].group_name]);
}
console.log("");
console.table(["First Name", "Last Name", "Position", "Salary", "Position Group"], table2);
console.log("");
console.log("Press up or down arrow to continue");
runTracker();
});
};
function quit() {
connection.end()
process.exit()
};
function addGroup() {
inquirer
.prompt([
{
type: "input",
name: "newGroup",
message: "What Position Group would you like to add?"
}
]).then(function (res) {
connection.query(
"INSERT INTO groups SET ?",
{
group_name: res.newGroup,
},
function (err) {
if (err) throw err;
});
runTracker();
});
};
function addPlayer() {
inquirer
.prompt([
{
type: "input",
name: "newPlayerFirst",
message: "What is the First Name of the Player you would like to add?"
},
{
type: "input",
name: "newPlayerLast",
message: "What is the Last Name of the Player you would like to add?"
},
{
type: "list",
name: "newPlayerPosition",
message: "What is the Position of the Player you would like to add?",
choices: [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
]
},
]).then(function (res) {
connection.query(
"INSERT INTO players SET ?",
{
first_name: res.newPlayerFirst,
last_name: res.newPlayerLast,
role_id: res.newPlayerPosition,
manager_id: 1
},
function (err) {
if (err) throw err;
});
viewPlayers();
});
// runTracker();
};
function viewPlayersInPosition() {
// console.clear();
let query = "SELECT players.first_name, players.last_name, positions.title FROM players, positions WHERE players.role_id = positions.role_id";
connection.query(query, function (err, res) {
let table3 = []
for (let i = 0; i < res.length; i++) {
table3.push([res[i].first_name, res[i].last_name, res[i].title]);
}
console.table("");
console.table(["First Name", "Last Name", "Position"], table3);
console.log("");
console.log("Press up or down arrow to continue");
runTracker();
})
};
function updatePlayerPosition() {
inquirer
.prompt([
{
type: "input",
name: "newPlayerId",
message: "What is the Number of the Player you would like to change?"
},
{
type: "list",
name: "changePlayerPosition",
message: "What is the NEW Position of this Player?",
choices: [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
]
},
]).then(function (res) {
let changePlayerPosition = res.changePlayerPosition
let newPlayerId = res.newPlayerId
connection.query("UPDATE players SET role_id = ? WHERE employee_id = ?", [changePlayerPosition, newPlayerId], function (err, res) {
if (err) throw err;
});
viewPlayers();
});
// runTracker();
};
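// Note on the UPDATE in updatePlayerPosition: "?" placeholders let the mysql
// driver escape user input, avoiding SQL injection, e.g.:
//
//   connection.query("UPDATE players SET role_id = ? WHERE employee_id = ?",
//     [3, 42], function (err) { if (err) throw err; });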
function.py
from athena_type_converter import convert_result_set, TYPE_CONVERTERS
from base64 import b64encode
from boto3 import client
from json import dumps as jsondumps
from logging import getLogger, INFO
from os import environ
__DATABASE = environ.get('DATABASE', 'default')
__LIMIT = environ.get('LIMIT', 100)
__WORKGROUP = environ.get('WORKGROUP', 'primary')
__timestamp = TYPE_CONVERTERS['timestamp']
TYPE_CONVERTERS['timestamp'] = lambda x: __timestamp(x).isoformat()
__date = TYPE_CONVERTERS['date']
TYPE_CONVERTERS['date'] = lambda x: __date(x).isoformat()
__time = TYPE_CONVERTERS['time']
TYPE_CONVERTERS['time'] = lambda x: __time(x).isoformat()
__varbinary = TYPE_CONVERTERS['varbinary']
TYPE_CONVERTERS['varbinary'] = lambda x: b64encode(__varbinary(x))
TYPE_CONVERTERS['decimal'] = lambda x: float(x) if x else None
def __query(event):
response = __ATHENA.start_query_execution(
QueryString=event['query'].format(**event.get('params', {})),
QueryExecutionContext={
'Database': event.get('database', __DATABASE)
},
WorkGroup=event.get('workgroup', __WORKGROUP)
)
return __get_status(response['QueryExecutionId'])
def __status(event):
return __get_status(event['id'])
def __results(event):
params = {
'QueryExecutionId': event['id'],
'MaxResults': int(event.get('limit', __LIMIT))
}
if event.get('nextToken'):
params['NextToken'] = event['nextToken']
response = __ATHENA.get_query_results(**params)
result = {
'results': convert_result_set(response['ResultSet'])
}
if 'NextToken' in response:
result['nextToken'] = response['NextToken']
return result
getLogger().setLevel(INFO)
__ACTIONS = {
'query': __query,
'status': __status,
'results': __results
}
__ATHENA = client('athena')
def handler(event, context):
getLogger().info('Processing event {}'.format(jsondumps(event)))
return __ACTIONS.get(event['action'], __unsupported_action)(event['arguments'])
def __get_status(query_execution_id):
response = __ATHENA.get_query_execution(
QueryExecutionId=query_execution_id
)
status = {}
execution = response['QueryExecution']
status['id'] = execution['QueryExecutionId']
response_status = execution['Status']
status['state'] = response_status['State']
if 'StateChangeReason' in response_status:
status['stateChangeReason'] = response_status['StateChangeReason']
status['submissionDateTime'] = response_status['SubmissionDateTime'].isoformat()
if 'CompletionDateTime' in response_status:
status['completionDateTime'] = response_status['CompletionDateTime'].isoformat()
return status
def __unsupported_action(event):
raise ValueError('Action {} is not supported'.format(event['action']))
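# Example invocation payloads for the handler above (values are illustrative):
#
#   {"action": "query",
#    "arguments": {"query": "SELECT * FROM {table} LIMIT 10",
#                  "params": {"table": "events"}}}
#
#   {"action": "results",
#    "arguments": {"id": "<query-execution-id>", "limit": 50}}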
test_param_grid.py
import datetime
from dateutil.relativedelta import relativedelta
import pytest
from ploomber.util import ParamGrid, Interval
def compare(a, b):
for element in a:
if element not in b:
return False
return len(a) == len(b)
def test_interval():
interval = Interval(datetime.date(year=2010, month=1, day=1),
datetime.date(year=2012, month=1, day=1),
relativedelta(years=1))
expanded = interval.expand()
repr_ = ('Interval from 2010-01-01 to 2012-01-01 with '
'delta relativedelta(years=+1)')
expected = [(datetime.date(2010, 1, 1), datetime.date(2011, 1, 1)),
(datetime.date(2011, 1, 1), datetime.date(2012, 1, 1))]
assert expanded == expected
assert repr(interval) == repr_
def test_param_grid():
pg = ParamGrid({'a': [1, 2, 3], 'b': [2, 4, 6]})
assert compare(list(pg.zip()), [{
'a': 1,
'b': 2
}, {
'a': 2,
'b': 4
}, {
'a': 3,
'b': 6
}])
assert compare(list(pg.product()), [{
'a': 1,
'b': 2
}, {
'a': 1,
'b': 4
}, {
'a': 1,
'b': 6
}, {
'a': 2,
'b': 2
}, {
'a': 2,
'b': 4
}, {
'a': 2,
'b': 6
}, {
'a': 3,
'b': 2
}, {
'a': 3,
'b': 4
}, {
'a': 3,
'b': 6
}])
def test_param_grid_w_interval():
pg = ParamGrid({'a': Interval(0, 10, 2), 'b': [2, 4, 6, 8, 10]})
assert compare(list(pg.zip()), [{
'a': (0, 2),
'b': 2
}, {
'a': (2, 4),
'b': 4
}, {
'a': (4, 6),
'b': 6
}, {
'a': (6, 8),
'b': 8
}, {
'a': (8, 10),
'b': 10
}])
def test_param_grid_list():
first = {'a': [1, 2], 'b': [1, 2]}
second = {'c': [3, 4], 'd': [3, 4]}
pg = ParamGrid([first, second])
assert list(pg.product()) == [{
'a': 1,
'b': 1
}, {
'a': 1,
'b': 2
}, {
'a': 2,
'b': 1
}, {
'a': 2,
'b': 2
}, {
'c': 3,
'd': 3
}, {
'c': 3,
'd': 4
}, {
'c': 4,
'd': 3
}, {
'c': 4,
'd': 4
}]
def test_param_grid_with_str_list():
pg = ParamGrid({
'a': ['one', 'another'],
'b': ['more', 'final'],
})
assert len(list(pg.product())) == 4
@pytest.mark.parametrize('val', [
'one',
1,
1.1,
])
def test_param_grid_product_with_single_value(val):
pg = ParamGrid({'a': val, 'b': ['more', 'final']})
assert len(list(pg.product())) == 2
@pytest.mark.parametrize('val', [
'one',
1,
1.1,
])
def test_param_grid_zip_with_single_value(val):
pg = ParamGrid({'a': val, 'b': ['more']})
assert len(list(pg.zip())) == 1
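# Quick reference for the two expansion modes exercised above:
#   ParamGrid({'a': [1, 2], 'b': [3, 4]}).zip()     -> {'a': 1, 'b': 3}, {'a': 2, 'b': 4}
#   ParamGrid({'a': [1, 2], 'b': [3, 4]}).product() -> all four combinations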
confirm.js
(function($) {
    $(".delete").on("submit", function(){
        return confirm("Do you want to delete this item?");
    });
})(jQuery);
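// Markup this handler expects (illustrative): a delete form carrying the class, e.g.
// <form class="delete" action="/items/42" method="post">...</form>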
validatorcredentials.go
/*
Copyright 2021 NDD.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package nn
import (
"context"
"strings"
"github.com/pkg/errors"
"github.com/yndd/ndd-runtime/pkg/resource"
corev1 "k8s.io/api/core/v1"
"k8s.io/apimachinery/pkg/types"
)
const (
// Errors
errEmptyTargetSecretReference = "empty target secret reference"
errCredentialSecretDoesNotExist = "credential secret does not exist"
errEmptyTargetAddress = "empty target address"
errMissingUsername = "missing username in credentials"
errMissingPassword = "missing password in credentials"
)
// Credentials holds the information for authenticating with the Server.
type Credentials struct {
Username string
Password string
}
func (v *NnValidator) ValidateCredentials(ctx context.Context, namespace, credentialsName, targetAddress string) (creds *Credentials, err error) {
log := v.log.WithValues("namespace", namespace, "credentialsName", credentialsName, "targetAddress", targetAddress)
log.Debug("Credentials Validation")
// Retrieve the secret from Kubernetes for this network node
if namespace == "" {
namespace = "default"
}
credsSecret, err := v.GetSecret(ctx, namespace, credentialsName)
if err != nil {
return nil, err
}
// Check if address is defined on the network node
if targetAddress == "" {
return nil, errors.New(errEmptyTargetAddress)
}
creds = &Credentials{
Username: strings.TrimSuffix(string(credsSecret.Data["username"]), "\n"),
Password: strings.TrimSuffix(string(credsSecret.Data["password"]), "\n"),
}
log.Debug("Credentials", "creds", creds)
if creds.Username == "" {
return nil, errors.New(errMissingUsername)
}
	if creds.Password == "" {
		return nil, errors.New(errMissingPassword)
	}
return creds, nil
}
// Retrieve the secret containing the credentials for talking to the Network Node.
func (v *NnValidator) GetSecret(ctx context.Context, namespace, credentialsName string) (credsSecret *corev1.Secret, err error) {
// check if credentialName is specified
if credentialsName == "" {
return nil, errors.New(errEmptyTargetSecretReference)
}
// check if credential secret exists
secretKey := types.NamespacedName{
Name: credentialsName,
Namespace: namespace,
}
credsSecret = &corev1.Secret{}
if err := v.client.Get(ctx, secretKey, credsSecret); resource.IgnoreNotFound(err) != nil {
return nil, errors.Wrap(err, errCredentialSecretDoesNotExist)
}
return credsSecret, nil
}
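// Example credentials Secret this validator reads (illustrative values):
//
//	apiVersion: v1
//	kind: Secret
//	metadata:
//	  name: device-creds
//	  namespace: default
//	stringData:
//	  username: admin
//	  password: admin123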
fmt.rs
use crate::prelude::*;
#[cfg(feature = "temporal")]
use crate::chunked_array::temporal::{
date32_as_datetime, date64_as_datetime, time64_nanosecond_as_time,
};
use num::{Num, NumCast};
use std::{
fmt,
fmt::{Debug, Display, Formatter},
};
const LIMIT: usize = 25;
#[cfg(feature = "pretty_fmt")]
use comfy_table::modifiers::UTF8_ROUND_CORNERS;
#[cfg(feature = "pretty_fmt")]
use comfy_table::presets::UTF8_FULL;
#[cfg(feature = "pretty_fmt")]
use comfy_table::*;
#[cfg(all(feature = "plain_fmt", not(feature = "pretty_fmt")))]
use prettytable::{Cell, Row, Table};
/// Some unit functions that just pass the integer values if we don't want all chrono functionality
#[cfg(not(feature = "temporal"))]
mod temporal {
pub struct DateTime<T>(T)
where
T: Copy;
impl<T> DateTime<T>
where
T: Copy,
{
pub fn date(&self) -> T {
self.0
}
}
pub fn date32_as_datetime(v: i32) -> DateTime<i32> {
DateTime(v)
}
pub fn date64_as_datetime(v: i64) -> DateTime<i64> {
DateTime(v)
}
pub fn time32_millisecond_as_time(v: i32) -> i32 {
v
}
pub fn time32_second_as_time(v: i32) -> i32 {
v
}
pub fn time64_nanosecond_as_time(v: i64) -> i64 {
v
}
pub fn time64_microsecond_as_time(v: i64) -> i64 {
v
}
pub fn timestamp_nanoseconds_as_datetime(v: i64) -> i64 {
v
}
pub fn timestamp_microseconds_as_datetime(v: i64) -> i64 {
v
}
pub fn timestamp_milliseconds_as_datetime(v: i64) -> i64 {
v
}
pub fn timestamp_seconds_as_datetime(v: i64) -> i64 {
v
}
}
#[cfg(any(feature = "plain_fmt", feature = "pretty_fmt"))]
use std::borrow::Cow;
#[cfg(not(feature = "temporal"))]
use temporal::*;
macro_rules! format_array {
($limit:expr, $f:ident, $a:expr, $dtype:expr, $name:expr, $array_type:expr) => {{
write!(
$f,
"shape: ({},)\n{}: '{}' [{}]\n[\n",
$a.len(),
$array_type,
$name,
$dtype
)?;
let truncate = matches!($a.dtype(), DataType::Utf8);
let limit = std::cmp::min($limit, $a.len());
let write = |v, f: &mut Formatter| {
if truncate {
let v = format!("{}", v);
let v_trunc = &v[..v
.char_indices()
.take(15)
.last()
.map(|(i, c)| i + c.len_utf8())
.unwrap_or(0)];
if v == v_trunc {
write!(f, "\t{}\n", v)?;
} else {
write!(f, "\t{}...\n", v_trunc)?;
}
} else {
write!(f, "\t{}\n", v)?;
};
Ok(())
};
if limit < $a.len() {
for i in 0..limit / 2 {
let v = $a.get_any_value(i);
write(v, $f)?;
}
write!($f, "\t...\n")?;
for i in (0..limit / 2).rev() {
let v = $a.get_any_value($a.len() - i - 1);
write(v, $f)?;
}
} else {
for i in 0..limit {
let v = $a.get_any_value(i);
write(v, $f)?;
}
}
write!($f, "]")
}};
}
#[cfg(feature = "object")]
fn format_object_array(
limit: usize,
f: &mut Formatter<'_>,
object: &dyn SeriesTrait,
name: &str,
array_type: &str,
) -> fmt::Result {
match object.dtype() {
DataType::Object(inner_type) => {
write!(
f,
"shape: ({},)\n{}: '{}' [o][{}]\n[\n",
object.len(),
array_type,
name,
inner_type
)?;
for i in 0..limit {
let v = object.str_value(i);
writeln!(f, "\t{}", v)?;
}
write!(f, "]")
}
_ => unreachable!(),
}
}
macro_rules! set_limit {
($self:ident) => {
std::cmp::min($self.len(), LIMIT)
};
}
impl<T> Debug for ChunkedArray<T>
where
T: PolarsNumericType,
{
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
let limit = set_limit!(self);
let dtype = format!("{:?}", T::get_dtype());
format_array!(limit, f, self, dtype, self.name(), "ChunkedArray")
}
}
impl Debug for ChunkedArray<BooleanType> {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
let limit = set_limit!(self);
format_array!(limit, f, self, "bool", self.name(), "ChunkedArray")
}
}
impl Debug for Utf8Chunked {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
format_array!(80, f, self, "str", self.name(), "ChunkedArray")
}
}
impl Debug for ListChunked {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
let limit = set_limit!(self);
format_array!(limit, f, self, "list", self.name(), "ChunkedArray")
}
}
impl Debug for CategoricalChunked {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
let limit = set_limit!(self);
format_array!(limit, f, self, "cat", self.name(), "ChunkedArray")
}
}
#[cfg(feature = "object")]
impl<T> Debug for ObjectChunked<T>
where
T: PolarsObject,
{
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
let limit = set_limit!(self);
let taker = self.take_rand();
let inner_type = T::type_name();
write!(
f,
"ChunkedArray: '{}' [o][{}]\n[\n",
self.name(),
inner_type
)?;
if limit < self.len() {
for i in 0..limit / 2 {
match taker.get(i) {
None => writeln!(f, "\tnull")?,
Some(val) => writeln!(f, "\t{}", val)?,
};
}
writeln!(f, "\t...")?;
for i in (0..limit / 2).rev() {
match taker.get(self.len() - i - 1) {
None => writeln!(f, "\tnull")?,
Some(val) => writeln!(f, "\t{}", val)?,
};
}
} else {
for i in 0..limit {
match taker.get(i) {
None => writeln!(f, "\tnull")?,
Some(val) => writeln!(f, "\t{}", val)?,
};
}
}
Ok(())
}
}
impl Debug for Series {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
let limit = set_limit!(self);
match self.dtype() {
DataType::Boolean => format_array!(
limit,
f,
self.bool().unwrap(),
"bool",
self.name(),
"Series"
),
DataType::Utf8 => {
format_array!(limit, f, self.utf8().unwrap(), "str", self.name(), "Series")
}
DataType::UInt8 => {
format_array!(limit, f, self.u8().unwrap(), "u8", self.name(), "Series")
}
DataType::UInt16 => {
format_array!(limit, f, self.u16().unwrap(), "u6", self.name(), "Series")
}
DataType::UInt32 => {
format_array!(limit, f, self.u32().unwrap(), "u32", self.name(), "Series")
}
DataType::UInt64 => {
format_array!(limit, f, self.u64().unwrap(), "u64", self.name(), "Series")
}
DataType::Int8 => {
format_array!(limit, f, self.i8().unwrap(), "i8", self.name(), "Series")
}
DataType::Int16 => {
format_array!(limit, f, self.i16().unwrap(), "i16", self.name(), "Series")
}
DataType::Int32 => {
format_array!(limit, f, self.i32().unwrap(), "i32", self.name(), "Series")
}
DataType::Int64 => {
format_array!(limit, f, self.i64().unwrap(), "i64", self.name(), "Series")
}
DataType::Float32 => {
format_array!(limit, f, self.f32().unwrap(), "f32", self.name(), "Series")
}
DataType::Float64 => {
format_array!(limit, f, self.f64().unwrap(), "f64", self.name(), "Series")
}
DataType::Date32 => format_array!(
limit,
f,
self.date32().unwrap(),
"date32",
self.name(),
"Series"
),
DataType::Date64 => format_array!(
limit,
f,
self.date64().unwrap(),
"date64",
self.name(),
"Series"
),
DataType::Time64(TimeUnit::Nanosecond) => format_array!(
limit,
f,
self.time64_nanosecond().unwrap(),
"time64(ns)",
self.name(),
"Series"
),
DataType::Duration(TimeUnit::Nanosecond) => format_array!(
limit,
f,
self.duration_nanosecond().unwrap(),
"duration(ns)",
self.name(),
"Series"
),
DataType::Duration(TimeUnit::Millisecond) => format_array!(
limit,
f,
self.duration_millisecond().unwrap(),
"duration(ms)",
self.name(),
"Series"
),
DataType::List(_) => format_array!(
limit,
f,
self.list().unwrap(),
"list",
self.name(),
"Series"
),
#[cfg(feature = "object")]
DataType::Object(_) => {
format_object_array(limit, f, self.as_ref(), self.name(), "Series")
}
DataType::Categorical => format_array!(
limit,
f,
self.categorical().unwrap(),
"cat",
self.name(),
"Series"
),
_ => unimplemented!(),
}
}
}
impl Display for Series {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
Debug::fmt(self, f)
}
}
impl Debug for DataFrame {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
Display::fmt(self, f)
}
}
#[cfg(any(feature = "plain_fmt", feature = "pretty_fmt"))]
fn prepare_row(row: Vec<Cow<'_, str>>, n_first: usize, n_last: usize) -> Vec<String> {
fn make_str_val(v: &str) -> String {
let string_limit = 32;
let v_trunc = &v[..v
.char_indices()
.take(string_limit)
.last()
.map(|(i, c)| i + c.len_utf8())
.unwrap_or(0)];
if v == v_trunc {
v.to_string()
} else {
format!("{}...", v_trunc)
}
}
let reduce_columns = n_first + n_last < row.len();
let mut row_str = Vec::with_capacity(n_first + n_last + reduce_columns as usize);
for v in row[0..n_first].iter() {
row_str.push(make_str_val(v));
}
if reduce_columns {
row_str.push("...".to_string());
}
for v in row[row.len() - n_last..].iter() {
row_str.push(make_str_val(v));
}
row_str
}
impl Display for DataFrame {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
let height = self.height();
if !self.columns.iter().all(|s| s.len() == height) {
panic!("The columns lengths in the DataFrame are not equal.");
}
let max_n_cols = std::env::var("POLARS_FMT_MAX_COLS")
.unwrap_or_else(|_| "8".to_string())
.parse()
.unwrap_or(8);
#[cfg(any(feature = "plain_fmt", feature = "pretty_fmt"))]
let max_n_rows = std::env::var("POLARS_FMT_MAX_ROWS")
.unwrap_or_else(|_| "8".to_string())
.parse()
.unwrap_or(8);
let (n_first, n_last) = if self.width() > max_n_cols {
((max_n_cols + 1) / 2, max_n_cols / 2)
} else {
(self.width(), 0)
};
let reduce_columns = n_first + n_last < self.width();
let field_to_str = |f: &Field| format!("{}\n---\n{}", f.name(), f.data_type());
let mut names = Vec::with_capacity(n_first + n_last + reduce_columns as usize);
let schema = self.schema();
let fields = schema.fields();
for field in fields[0..n_first].iter() {
names.push(field_to_str(field))
}
if reduce_columns {
names.push("...".to_string())
}
for field in fields[self.width() - n_last..].iter() {
names.push(field_to_str(field))
}
#[cfg(feature = "pretty_fmt")]
        {
            let mut table = Table::new();
            table
                .load_preset(UTF8_FULL)
                .set_content_arrangement(ContentArrangement::Dynamic)
                .apply_modifier(UTF8_ROUND_CORNERS)
                .set_table_width(
                    std::env::var("POLARS_TABLE_WIDTH")
                        .map(|s| {
                            s.parse::<u16>()
                                .expect("could not parse table width argument")
                        })
                        .unwrap_or(100),
                )
                .set_header(names);
            let mut rows = Vec::with_capacity(max_n_rows);
            if self.height() > max_n_rows {
                for i in 0..(max_n_rows / 2) {
                    let row = self.columns.iter().map(|s| s.str_value(i)).collect();
                    rows.push(prepare_row(row, n_first, n_last));
                }
                let dots = rows[0].iter().map(|_| "...".to_string()).collect();
                rows.push(dots);
                for i in (self.height() - max_n_rows / 2 - 1)..self.height() {
                    let row = self.columns.iter().map(|s| s.str_value(i)).collect();
                    rows.push(prepare_row(row, n_first, n_last));
                }
                for row in rows {
                    table.add_row(row);
                }
            } else {
                for i in 0..max_n_rows {
                    if i < self.height() && self.width() > 0 {
                        let row = self.columns.iter().map(|s| s.str_value(i)).collect();
                        table.add_row(prepare_row(row, n_first, n_last));
                    } else {
                        break;
                    }
                }
            }
            write!(f, "shape: {:?}\n{}", self.shape(), table)?;
        }
#[cfg(not(any(feature = "plain_fmt", feature = "pretty_fmt")))]
{
write!(
f,
"shape: {:?}\nto see more, compile with 'plain_fmt' or 'pretty_fmt' feature",
self.shape()
)?;
}
#[cfg(all(feature = "plain_fmt", not(feature = "pretty_fmt")))]
{
let mut table = Table::new();
table.set_titles(Row::new(names.into_iter().map(|s| Cell::new(&s)).collect()));
let mut rows = Vec::with_capacity(max_n_rows);
if self.height() > max_n_rows {
for i in 0..(max_n_rows / 2) {
let row = self.columns.iter().map(|s| s.str_value(i)).collect();
rows.push(prepare_row(row, n_first, n_last));
}
let dots = rows[0].iter().map(|_| "...".to_string()).collect();
rows.push(dots);
for i in (self.height() - max_n_rows / 2 - 1)..self.height() {
let row = self.columns.iter().map(|s| s.str_value(i)).collect();
rows.push(prepare_row(row, n_first, n_last));
}
for row in rows {
table.add_row(Row::new(row.into_iter().map(|s| Cell::new(&s)).collect()));
}
} else {
for i in 0..max_n_rows {
if i < self.height() && self.width() > 0 {
let row = self.columns.iter().map(|s| s.str_value(i)).collect();
table.add_row(Row::new(
prepare_row(row, n_first, n_last)
.into_iter()
.map(|s| Cell::new(&s))
.collect(),
));
} else {
break;
}
}
}
write!(f, "shape: {:?}\n{}", self.shape(), table)?;
}
Ok(())
}
}
fn fmt_integer<T: Num + NumCast + Display>(
f: &mut Formatter<'_>,
width: usize,
v: T,
) -> fmt::Result {
write!(f, "{:>width$}", v, width = width)
}
fn fmt_float<T: Num + NumCast>(f: &mut Formatter<'_>, width: usize, v: T) -> fmt::Result {
let v: f64 = NumCast::from(v).unwrap();
if v == 0.0 {
write!(f, "{:>width$.1}", v, width = width)
} else if !(0.0001..=9999.).contains(&v) {
write!(f, "{:>width$e}", v, width = width)
} else {
write!(f, "{:>width$}", v, width = width)
}
}
impl Display for AnyValue<'_> {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
let width = 0;
match self {
AnyValue::Null => write!(f, "null"),
AnyValue::UInt8(v) => write!(f, "{}", v),
AnyValue::UInt16(v) => write!(f, "{}", v),
AnyValue::UInt32(v) => write!(f, "{}", v),
AnyValue::UInt64(v) => write!(f, "{}", v),
AnyValue::Int8(v) => fmt_integer(f, width, *v),
AnyValue::Int16(v) => fmt_integer(f, width, *v),
AnyValue::Int32(v) => fmt_integer(f, width, *v),
AnyValue::Int64(v) => fmt_integer(f, width, *v),
AnyValue::Float32(v) => fmt_float(f, width, *v),
AnyValue::Float64(v) => fmt_float(f, width, *v),
AnyValue::Boolean(v) => write!(f, "{}", *v),
AnyValue::Utf8(v) => write!(f, "{}", format!("\"{}\"", v)),
#[cfg(feature = "temporal")]
AnyValue::Date32(v) => write!(f, "{}", date32_as_datetime(*v).date()),
#[cfg(feature = "temporal")]
AnyValue::Date64(v) => write!(f, "{}", date64_as_datetime(*v)),
AnyValue::Time64(v, TimeUnit::Nanosecond) => {
write!(f, "{}", time64_nanosecond_as_time(*v))
}
AnyValue::Duration(v, TimeUnit::Nanosecond) => write!(f, "{}", v),
AnyValue::Duration(v, TimeUnit::Millisecond) => write!(f, "{}", v),
AnyValue::List(s) => write!(f, "{}", s.fmt_list()),
#[cfg(feature = "object")]
AnyValue::Object(_) => write!(f, "object"),
_ => unimplemented!(),
}
}
}
macro_rules! fmt_option {
($opt:expr) => {{
match $opt {
Some(v) => format!("{}", v),
None => "null".to_string(),
}
}};
}
macro_rules! impl_fmt_list {
($self:ident) => {{
match $self.len() {
0 => format!("[]"),
1 => format!("[{}]", fmt_option!($self.get(0))),
2 => format!(
"[{}, {}]",
fmt_option!($self.get(0)),
fmt_option!($self.get(1))
),
3 => format!(
"[{}, {}, {}]",
fmt_option!($self.get(0)),
fmt_option!($self.get(1)),
fmt_option!($self.get(2))
),
_ => format!(
"[{}, {}, ... {}]",
fmt_option!($self.get(0)),
fmt_option!($self.get(1)),
fmt_option!($self.get($self.len() - 1))
),
}
}};
}
pub(crate) trait FmtList {
fn fmt_list(&self) -> String;
}
impl<T> FmtList for ChunkedArray<T>
where
T: PolarsNumericType,
T::Native: fmt::Display,
{
fn fmt_list(&self) -> String {
impl_fmt_list!(self)
}
}
impl FmtList for BooleanChunked {
fn fmt_list(&self) -> String {
impl_fmt_list!(self)
}
}
impl FmtList for Utf8Chunked {
fn fmt_list(&self) -> String {
impl_fmt_list!(self)
}
}
impl FmtList for ListChunked {
fn fmt_list(&self) -> String {
impl_fmt_list!(self)
}
}
impl FmtList for CategoricalChunked {
fn fmt_list(&self) -> String {
impl_fmt_list!(self)
}
}
#[cfg(feature = "object")]
impl<T> FmtList for ObjectChunked<T> {
fn fmt_list(&self) -> String {
todo!()
}
}
#[cfg(all(
test,
feature = "temporal",
feature = "dtype-date32",
feature = "dtype-date64"
))]
mod test {
use crate::prelude::*;
use arrow::array::PrimitiveBuilder;
#[test]
fn test_fmt_list() {
let values_builder = PrimitiveBuilder::<UInt32Type>::new(10);
let mut builder = ListPrimitiveChunkedBuilder::new("a", values_builder, 10);
builder.append_slice(Some(&[1, 2, 3]));
builder.append_slice(None);
let list = builder.finish().into_series();
println!("{:?}", list);
assert_eq!(
r#"shape: (2,)
Series: 'a' [list]
[
[1, 2, 3]
null
]"#,
format!("{:?}", list)
);
}
#[test]
#[cfg(feature = "dtype-time64-ns")]
fn test_fmt_temporal() {
let s = Date32Chunked::new_from_opt_slice("date32", &[Some(1), None, Some(3)]);
assert_eq!(
r#"shape: (3,)
Series: 'date32' [date32]
[
1970-01-02
null
1970-01-04
]"#,
format!("{:?}", s.into_series())
);
let s = Date64Chunked::new_from_opt_slice("", &[Some(1), None, Some(1_000_000_000_000)]);
assert_eq!(
r#"shape: (3,)
Series: '' [date64]
[
1970-01-01 00:00:00.001
null
2001-09-09 01:46:40
]"#,
format!("{:?}", s.into_series())
);
let s = Time64NanosecondChunked::new_from_slice(
"",
&[1_000_000, 37_800_005_000_000, 86_399_210_000_000],
);
assert_eq!(
r#"shape: (3,)
Series: '' [time64(ns)]
[
00:00:00.001
10:30:00.005
23:59:59.210
]"#,
format!("{:?}", s.into_series())
)
}
#[test]
fn test_fmt_chunkedarray() {
let ca = Int32Chunked::new_from_opt_slice("date32", &[Some(1), None, Some(3)]);
println!("{:?}", ca);
assert_eq!(
r#"shape: (3,)
ChunkedArray: 'date32' [Int32]
[
1
null
3
]"#,
format!("{:?}", ca)
);
let ca = Utf8Chunked::new_from_slice("name", &["a", "b"]);
println!("{:?}", ca);
assert_eq!(
r#"shape: (2,)
ChunkedArray: 'name' [str]
[
"a"
"b"
]"#,
format!("{:?}", ca)
);
}
#[test]
fn test_fmt_series() {
let s = Series::new("foo", &["Somelongstringto eeat wit me oundaf"]);
dbg!(&s);
assert_eq!(
r#"shape: (1,)
Series: 'foo' [str]
[
"Somelongstring...
]"#,
format!("{:?}", s)
);
let s = Series::new("foo", &["😀😁😂😃😄😅😆😇😈😉😊😋😌😎😏😐😑😒😓"]);
dbg!(&s);
assert_eq!(
r#"shape: (1,)
Series: 'foo' [str]
[
"😀😁😂😃😄😅😆😇😈😉😊😋😌😎...
]"#,
format!("{:?}", s)
);
let s = Series::new("foo", &["yzäöüäöüäöüäö"]);
dbg!(&s);
assert_eq!(
r#"shape: (1,)
Series: 'foo' [str]
[
"yzäöüäöüäöüäö"
]"#,
format!("{:?}", s)
);
let s = Series::new("foo", (0..100).collect::<Vec<_>>());
dbg!(&s);
assert_eq!(
r#"shape: (100,)
Series: 'foo' [i32]
[
0
1
2
3
4
5
6
7
8
9
10
11
...
88
89
90
91
92
93
94
95
96
97
98
99
]"#,
format!("{:?}", s)
);
}
}
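// The DataFrame formatting above is driven by environment variables; an
// illustrative way to shrink the printed table:
//
// std::env::set_var("POLARS_FMT_MAX_ROWS", "4");
// std::env::set_var("POLARS_FMT_MAX_COLS", "4");
// println!("{}", df); // at most 4 rows/columns, elided with "..."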
session_test.go
/*
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
the License. A copy of the License is located at
http://www.apache.org/licenses/LICENSE-2.0
or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
and limitations under the License.
*/
package qldbdriver
import (
"context"
"net/http"
"testing"
"github.com/aws/aws-sdk-go/aws/awserr"
"github.com/aws/aws-sdk-go/service/qldbsession"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/mock"
)
func TestSessionStartTransaction(t *testing.T) {
t.Run("error", func(t *testing.T) {
mockSessionService := new(mockSessionService)
mockSessionService.On("startTransaction", mock.Anything).Return(&mockStartTransactionResult, mockError)
session := session{mockSessionService, mockLogger}
result, err := session.startTransaction(context.Background())
assert.Equal(t, mockError, err)
assert.Nil(t, result)
})
t.Run("success", func(t *testing.T) {
mockSessionService := new(mockSessionService)
mockSessionService.On("startTransaction", mock.Anything).Return(&mockStartTransactionResult, nil)
session := session{mockSessionService, mockLogger}
result, err := session.startTransaction(context.Background())
assert.NoError(t, err)
assert.Equal(t, mockTransactionID, *result.id)
})
}
func TestSessionEndSession(t *testing.T) {
t.Run("error", func(t *testing.T) {
mockSessionService := new(mockSessionService)
mockSessionService.On("endSession", mock.Anything).Return(&mockEndSessionResult, mockError)
session := session{mockSessionService, mockLogger}
err := session.endSession(context.Background())
assert.Equal(t, mockError, err)
})
t.Run("success", func(t *testing.T) {
mockSessionService := new(mockSessionService)
mockSessionService.On("endSession", mock.Anything).Return(&mockEndSessionResult, nil)
session := session{mockSessionService, mockLogger}
err := session.endSession(context.Background())
assert.NoError(t, err)
})
}
func TestSessionExecute(t *testing.T) {
t.Run("success", func(t *testing.T) {
mockSessionService := new(mockSessionService)
mockSessionService.On("startTransaction", mock.Anything).Return(&mockStartTransactionResult, nil)
mockSessionService.On("executeStatement", mock.Anything, mock.Anything, mock.Anything, mock.Anything).
Return(&mockExecuteResult, nil)
mockSessionService.On("commitTransaction", mock.Anything, mock.Anything, mock.Anything).
Return(&mockCommitTransactionResult, nil)
session := session{mockSessionService, mockLogger}
result, err := session.execute(context.Background(), func(txn Transaction) (interface{}, error) {
_, err := txn.Execute("SELECT v FROM table")
if err != nil {
return nil, err
}
return 3, nil
})
assert.Nil(t, err)
assert.Equal(t, 3, result)
})
t.Run("startTxnUnknownErrorAbortSuccess", func(t *testing.T) {
mockSessionService := new(mockSessionService)
mockSessionService.On("startTransaction", mock.Anything).Return(&mockStartTransactionResult, mockError)
mockSessionService.On("abortTransaction", mock.Anything).Return(&mockAbortTransactionResult, nil)
session := session{mockSessionService, mockLogger}
result, err := session.execute(context.Background(), func(txn Transaction) (interface{}, error) {
_, err := txn.Execute("SELECT v FROM table")
if err != nil {
return nil, err
}
return 3, nil
})
assert.Nil(t, result)
assert.Equal(t, mockError, err.err)
assert.False(t, err.isISE)
assert.False(t, err.canRetry)
assert.True(t, err.abortSuccess)
})
t.Run("startTxnUnknownErrorAbortErr", func(t *testing.T) {
mockSessionService := new(mockSessionService)
mockSessionService.On("startTransaction", mock.Anything).Return(&mockStartTransactionResult, mockError)
mockSessionService.On("abortTransaction", mock.Anything).Return(&mockAbortTransactionResult, mockError)
session := session{mockSessionService, mockLogger}
result, err := session.execute(context.Background(), func(txn Transaction) (interface{}, error) {
_, err := txn.Execute("SELECT v FROM table")
if err != nil {
return nil, err
}
return 3, nil
})
assert.Nil(t, result)
assert.Equal(t, mockError, err.err)
assert.False(t, err.isISE)
assert.False(t, err.canRetry)
assert.False(t, err.abortSuccess)
})
t.Run("startTxnISE", func(t *testing.T) {
mockSessionService := new(mockSessionService)
mockSessionService.On("startTransaction", mock.Anything).Return(&mockStartTransactionResult, testISE)
session := session{mockSessionService, mockLogger}
result, err := session.execute(context.Background(), func(txn Transaction) (interface{}, error) {
_, err := txn.Execute("SELECT v FROM table")
if err != nil {
return nil, err
}
return 3, nil
})
assert.Nil(t, result)
assert.Equal(t, testISE, err.err)
assert.True(t, err.isISE)
assert.True(t, err.canRetry)
assert.False(t, err.abortSuccess)
})
t.Run("startTxn500AbortSuccess", func(t *testing.T) {
mockSessionService := new(mockSessionService)
mockSessionService.On("startTransaction", mock.Anything).Return(&mockStartTransactionResult, test500)
mockSessionService.On("abortTransaction", mock.Anything).Return(&mockAbortTransactionResult, nil)
session := session{mockSessionService, mockLogger}
result, err := session.execute(context.Background(), func(txn Transaction) (interface{}, error) {
_, err := txn.Execute("SELECT v FROM table")
if err != nil {
return nil, err
}
return 3, nil
})
assert.Nil(t, result)
assert.Equal(t, test500, err.err)
assert.Equal(t, "", err.transactionID)
assert.False(t, err.isISE)
assert.True(t, err.canRetry)
assert.True(t, err.abortSuccess)
})
t.Run("startTxn500AbortError", func(t *testing.T) {
mockSessionService := new(mockSessionService)
mockSessionService.On("startTransaction", mock.Anything).Return(&mockStartTransactionResult, test500)
mockSessionService.On("abortTransaction", mock.Anything).Return(&mockAbortTransactionResult, mockError)
session := session{mockSessionService, mockLogger}
result, err := session.execute(context.Background(), func(txn Transaction) (interface{}, error) {
_, err := txn.Execute("SELECT v FROM table")
if err != nil {
return nil, err
}
return 3, nil
})
assert.Nil(t, result)
assert.Equal(t, test500, err.err)
assert.Equal(t, "", err.transactionID)
assert.False(t, err.isISE)
assert.True(t, err.canRetry)
assert.False(t, err.abortSuccess)
})
t.Run("executeUnknownErrorAbortSuccess", func(t *testing.T) {
mockSessionService := new(mockSessionService)
mockSessionService.On("startTransaction", mock.Anything).Return(&mockStartTransactionResult, nil)
mockSessionService.On("executeStatement", mock.Anything, mock.Anything, mock.Anything, mock.Anything).
Return(&mockExecuteResult, mockError)
mockSessionService.On("abortTransaction", mock.Anything).Return(&mockAbortTransactionResult, nil)
session := session{mockSessionService, mockLogger}
result, err := session.execute(context.Background(), func(txn Transaction) (interface{}, error) {
_, err := txn.Execute("SELECT v FROM table")
if err != nil {
return nil, err
}
return 3, nil
})
assert.Nil(t, result)
assert.Equal(t, mockError, err.err)
assert.False(t, err.isISE)
assert.False(t, err.canRetry)
assert.True(t, err.abortSuccess)
})
t.Run("executeUnknownErrorAbortError", func(t *testing.T) {
mockSessionService := new(mockSessionService)
mockSessionService.On("startTransaction", mock.Anything).Return(&mockStartTransactionResult, nil)
mockSessionService.On("executeStatement", mock.Anything, mock.Anything, mock.Anything, mock.Anything).
Return(&mockExecuteResult, mockError)
mockSessionService.On("abortTransaction", mock.Anything).Return(&mockAbortTransactionResult, mockError)
session := session{mockSessionService, mockLogger}
result, err := session.execute(context.Background(), func(txn Transaction) (interface{}, error) {
_, err := txn.Execute("SELECT v FROM table")
if err != nil {
return nil, err
}
return 3, nil
})
assert.Nil(t, result)
assert.Equal(t, mockError, err.err)
assert.False(t, err.isISE)
assert.False(t, err.canRetry)
assert.False(t, err.abortSuccess)
})
t.Run("executeISE", func(t *testing.T) {
mockSessionService := new(mockSessionService)
mockSessionService.On("startTransaction", mock.Anything).Return(&mockStartTransactionResult, nil)
mockSessionService.On("executeStatement", mock.Anything, mock.Anything, mock.Anything, mock.Anything).
Return(&mockExecuteResult, testISE)
session := session{mockSessionService, mockLogger}
result, err := session.execute(context.Background(), func(txn Transaction) (interface{}, error) {
_, err := txn.Execute("SELECT v FROM table")
if err != nil {
return nil, err
}
return 3, nil
})
assert.Nil(t, result)
assert.Equal(t, testISE, err.err)
assert.True(t, err.isISE)
assert.True(t, err.canRetry)
assert.False(t, err.abortSuccess)
})
t.Run("execute500AbortSuccess", func(t *testing.T) {
mockSessionService := new(mockSessionService)
mockSessionService.On("startTransaction", mock.Anything).Return(&mockStartTransactionResult, nil)
mockSessionService.On("executeStatement", mock.Anything, mock.Anything, mock.Anything, mock.Anything).
Return(&mockExecuteResult, test500)
mockSessionService.On("abortTransaction", mock.Anything).Return(&mockAbortTransactionResult, nil)
session := session{mockSessionService, mockLogger}
result, err := session.execute(context.Background(), func(txn Transaction) (interface{}, error) {
_, err := txn.Execute("SELECT v FROM table")
if err != nil {
return nil, err
}
return 3, nil
})
assert.Nil(t, result)
assert.IsType(t, &txnError{}, err)
assert.Equal(t, test500, err.err)
assert.Equal(t, mockTransactionID, err.transactionID)
assert.False(t, err.isISE)
assert.True(t, err.canRetry)
assert.True(t, err.abortSuccess)
})
t.Run("execute500AbortError", func(t *testing.T) {
mockSessionService := new(mockSessionService)
mockSessionService.On("startTransaction", mock.Anything).Return(&mockStartTransactionResult, nil)
mockSessionService.On("executeStatement", mock.Anything, mock.Anything, mock.Anything, mock.Anything).
Return(&mockExecuteResult, test500)
mockSessionService.On("abortTransaction", mock.Anything).Return(&mockAbortTransactionResult, mockError)
session := session{mockSessionService, mockLogger}
result, err := session.execute(context.Background(), func(txn Transaction) (interface{}, error) {
_, err := txn.Execute("SELECT v FROM table")
if err != nil {
return nil, err
}
return 3, nil
})
assert.Nil(t, result)
assert.IsType(t, &txnError{}, err)
assert.Equal(t, test500, err.err)
assert.Equal(t, mockTransactionID, err.transactionID)
assert.False(t, err.isISE)
assert.True(t, err.canRetry)
assert.False(t, err.abortSuccess)
})
t.Run("executeBadReqAbortSuccess", func(t *testing.T) {
mockSessionService := new(mockSessionService)
mockSessionService.On("startTransaction", mock.Anything).Return(&mockStartTransactionResult, nil)
mockSessionService.On("executeStatement", mock.Anything, mock.Anything, mock.Anything, mock.Anything).
Return(&mockExecuteResult, testBadReq)
mockSessionService.On("abortTransaction", mock.Anything).Return(&mockAbortTransactionResult, nil)
session := session{mockSessionService, mockLogger}
result, err := session.execute(context.Background(), func(txn Transaction) (interface{}, error) {
_, err := txn.Execute("SELECT v FROM table")
if err != nil {
return nil, err
}
return 3, nil
})
assert.Nil(t, result)
assert.Equal(t, testBadReq, err.err)
assert.False(t, err.isISE)
assert.False(t, err.canRetry)
assert.True(t, err.abortSuccess)
})
t.Run("executeBadReqAbortError", func(t *testing.T) {
mockSessionService := new(mockSessionService)
mockSessionService.On("startTransaction", mock.Anything).Return(&mockStartTransactionResult, nil)
mockSessionService.On("executeStatement", mock.Anything, mock.Anything, mock.Anything, mock.Anything).
Return(&mockExecuteResult, testBadReq)
mockSessionService.On("abortTransaction", mock.Anything).Return(&mockAbortTransactionResult, mockError)
session := session{mockSessionService, mockLogger}
result, err := session.execute(context.Background(), func(txn Transaction) (interface{}, error) {
_, err := txn.Execute("SELECT v FROM table")
if err != nil {
return nil, err
}
return 3, nil
})
assert.Nil(t, result)
assert.Equal(t, testBadReq, err.err)
assert.False(t, err.isISE)
assert.False(t, err.canRetry)
assert.False(t, err.abortSuccess)
})
t.Run("commitUnknownErrorAbortSuccess", func(t *testing.T) {
mockSessionService := new(mockSessionService)
mockSessionService.On("startTransaction", mock.Anything).Return(&mockStartTransactionResult, nil)
mockSessionService.On("executeStatement", mock.Anything, mock.Anything, mock.Anything, mock.Anything).
Return(&mockExecuteResult, nil)
mockSessionService.On("commitTransaction", mock.Anything, mock.Anything, mock.Anything).
Return(&mockCommitTransactionResult, mockError)
mockSessionService.On("abortTransaction", mock.Anything).Return(&mockAbortTransactionResult, nil)
session := session{mockSessionService, mockLogger}
result, err := session.execute(context.Background(), func(txn Transaction) (interface{}, error) {
_, err := txn.Execute("SELECT v FROM table")
if err != nil {
return nil, err
}
return 3, nil
})
assert.Nil(t, result)
assert.Equal(t, mockError, err.err)
assert.False(t, err.isISE)
assert.False(t, err.canRetry)
assert.True(t, err.abortSuccess)
})
t.Run("commitUnknownErrorAbortError", func(t *testing.T) {
mockSessionService := new(mockSessionService)
mockSessionService.On("startTransaction", mock.Anything).Return(&mockStartTransactionResult, nil)
mockSessionService.On("executeStatement", mock.Anything, mock.Anything, mock.Anything, mock.Anything).
Return(&mockExecuteResult, nil)
mockSessionService.On("commitTransaction", mock.Anything, mock.Anything, mock.Anything).
Return(&mockCommitTransactionResult, mockError)
mockSessionService.On("abortTransaction", mock.Anything).Return(&mockAbortTransactionResult, mockError)
session := session{mockSessionService, mockLogger}
result, err := session.execute(context.Background(), func(txn Transaction) (interface{}, error) {
_, err := txn.Execute("SELECT v FROM table")
if err != nil {
return nil, err
}
return 3, nil
})
assert.Nil(t, result)
assert.Equal(t, mockError, err.err)
assert.False(t, err.isISE)
assert.False(t, err.canRetry)
assert.False(t, err.abortSuccess)
})
t.Run("commit500AbortSuccess", func(t *testing.T) {
mockSessionService := new(mockSessionService)
mockSessionService.On("startTransaction", mock.Anything).Return(&mockStartTransactionResult, nil)
mockSessionService.On("executeStatement", mock.Anything, mock.Anything, mock.Anything, mock.Anything).
Return(&mockExecuteResult, nil)
mockSessionService.On("commitTransaction", mock.Anything, mock.Anything, mock.Anything).
Return(&mockCommitTransactionResult, test500)
mockSessionService.On("abortTransaction", mock.Anything).Return(&mockAbortTransactionResult, nil)
session := session{mockSessionService, mockLogger}
result, err := session.execute(context.Background(), func(txn Transaction) (interface{}, error) {
_, err := txn.Execute("SELECT v FROM table")
if err != nil {
return nil, err
}
return 3, nil
})
assert.Nil(t, result)
assert.Equal(t, test500, err.err)
assert.Equal(t, mockTransactionID, err.transactionID)
assert.False(t, err.isISE)
assert.True(t, err.canRetry)
assert.True(t, err.abortSuccess)
})
t.Run("commit500AbortError", func(t *testing.T) {
mockSessionService := new(mockSessionService)
mockSessionService.On("startTransaction", mock.Anything).Return(&mockStartTransactionResult, nil)
mockSessionService.On("executeStatement", mock.Anything, mock.Anything, mock.Anything, mock.Anything).
Return(&mockExecuteResult, nil)
mockSessionService.On("commitTransaction", mock.Anything, mock.Anything, mock.Anything).
Return(&mockCommitTransactionResult, test500)
mockSessionService.On("abortTransaction", mock.Anything).Return(&mockAbortTransactionResult, mockError)
session := session{mockSessionService, mockLogger}
result, err := session.execute(context.Background(), func(txn Transaction) (interface{}, error) {
_, err := txn.Execute("SELECT v FROM table")
if err != nil {
return nil, err
}
return 3, nil
})
assert.Nil(t, result)
assert.Equal(t, test500, err.err)
assert.Equal(t, mockTransactionID, err.transactionID)
assert.False(t, err.isISE)
assert.True(t, err.canRetry)
assert.False(t, err.abortSuccess)
})
t.Run("commitOCC", func(t *testing.T) {
mockSessionService := new(mockSessionService)
mockSessionService.On("startTransaction", mock.Anything).Return(&mockStartTransactionResult, nil)
mockSessionService.On("executeStatement", mock.Anything, mock.Anything, mock.Anything, mock.Anything).
Return(&mockExecuteResult, nil)
mockSessionService.On("commitTransaction", mock.Anything, mock.Anything, mock.Anything).
Return(&mockCommitTransactionResult, testOCC)
session := session{mockSessionService, mockLogger}
result, err := session.execute(context.Background(), func(txn Transaction) (interface{}, error) {
_, err := txn.Execute("SELECT v FROM table")
if err != nil {
return nil, err
}
return 3, nil
})
assert.Nil(t, result)
assert.Equal(t, testOCC, err.err)
assert.False(t, err.isISE)
assert.True(t, err.canRetry)
assert.True(t, err.abortSuccess)
})
}
var mockTransactionID = "testTransactionIdddddd"
var mockAbortTransactionResult = qldbsession.AbortTransactionResult{}
var mockStartTransactionResult = qldbsession.StartTransactionResult{TransactionId: &mockTransactionID}
var mockEndSessionResult = qldbsession.EndSessionResult{}
var mockExecuteResult = qldbsession.ExecuteStatementResult{
FirstPage: &qldbsession.Page{},
}
var mockHash = []byte{73, 10, 104, 87, 43, 252, 182, 60, 142, 193, 0, 77, 158, 129, 52, 84, 126, 196, 120, 55, 241, 253, 113, 114, 114, 53, 233, 223, 234, 227, 191, 172}
var mockCommitTransactionResult = qldbsession.CommitTransactionResult{
TransactionId: &mockTransactionID,
CommitDigest: mockHash,
}
var testISE = awserr.New(qldbsession.ErrCodeInvalidSessionException, "Invalid session", nil)
var testOCC = awserr.New(qldbsession.ErrCodeOccConflictException, "OCC", nil)
var testBadReq = awserr.New(qldbsession.ErrCodeBadRequestException, "Bad request", nil)
var test500 = awserr.New(http.StatusText(http.StatusInternalServerError), "Five Hundred", nil)
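// mockSessionService is a testify mock implementing the session service
// interface that the tests above construct their session values from.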
type mockSessionService struct {
mock.Mock
}
func (m *mockSessionService) abortTransaction(ctx context.Context) (*qldbsession.AbortTransactionResult, error) {
args := m.Called(ctx)
return args.Get(0).(*qldbsession.AbortTransactionResult), args.Error(1)
}
func (m *mockSessionService) commitTransaction(ctx context.Context, txnID *string, commitDigest []byte) (*qldbsession.CommitTransactionResult, error) {
args := m.Called(ctx, txnID, commitDigest)
return args.Get(0).(*qldbsession.CommitTransactionResult), args.Error(1)
}
func (m *mockSessionService) executeStatement(ctx context.Context, statement *string, parameters []*qldbsession.ValueHolder, txnID *string) (*qldbsession.ExecuteStatementResult, error) {
args := m.Called(ctx, statement, parameters, txnID)
return args.Get(0).(*qldbsession.ExecuteStatementResult), args.Error(1)
}
func (m *mockSessionService) endSession(ctx context.Context) (*qldbsession.EndSessionResult, error) {
args := m.Called(ctx)
return args.Get(0).(*qldbsession.EndSessionResult), args.Error(1)
}
func (m *mockSessionService) fetchPage(ctx context.Context, pageToken *string, txnID *string) (*qldbsession.FetchPageResult, error) {
panic("not used")
}
func (m *mockSessionService) startTransaction(ctx context.Context) (*qldbsession.StartTransactionResult, error) {
args := m.Called(ctx)
return args.Get(0).(*qldbsession.StartTransactionResult), args.Error(1)
}
| TestSessionStartTransaction |
cluster_node_status.rs | #[allow(unused_imports)]
use serde_json::Value;
#[derive(Debug, Serialize, Deserialize)]
pub struct | {
/// Battery status information.
#[serde(rename = "batterystatus")]
pub batterystatus: Option<crate::models::NodeStatusNodeBatterystatus>,
/// Storage capacity of this node.
#[serde(rename = "capacity")]
pub capacity: Option<Vec<crate::models::NodeStatusNodeCapacityItem>>,
/// CPU status information for this node.
#[serde(rename = "cpu")]
pub cpu: Option<crate::models::NodeStatusNodeCpu>,
/// Node NVRAM information.
#[serde(rename = "nvram")]
pub nvram: Option<crate::models::NodeStatusNodeNvram>,
/// Information about this node's power supplies.
#[serde(rename = "powersupplies")]
pub powersupplies: Option<crate::models::NodeStatusNodePowersupplies>,
/// OneFS release.
#[serde(rename = "release")]
pub release: Option<String>,
/// Seconds this node has been online.
#[serde(rename = "uptime")]
pub uptime: Option<i32>,
/// OneFS version.
#[serde(rename = "version")]
pub version: Option<String>,
}
| ClusterNodeStatus |
issue-10626.rs | // Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Make sure that if a process doesn't have its stdio/stderr descriptors set up
// that we don't die in a large ball of fire
use std::os;
use std::old_io::process;
pub fn main () | {
let args = os::args();
let args = args.as_slice();
if args.len() > 1 && args[1].as_slice() == "child" {
for _ in 0..1000 {
println!("hello?");
}
for _ in 0..1000 {
println!("hello?");
}
return;
}
let mut p = process::Command::new(args[0].as_slice());
p.arg("child").stdout(process::Ignored).stderr(process::Ignored);
println!("{:?}", p.spawn().unwrap().wait());
} |
|
App.js | import React from 'react';
import {StyleSheet, Text, View, TouchableOpacity} from 'react-native';
import {SafeAreaView} from 'react-navigation'; // not strictly necessary, used only for this sample
import {getDataMode, TYPE_CASH, TYPE_CREDIT, TYPE_MIX} from './components/view/DataMode';
// import RNSelectGroupButtonsModal from './components/view/RNSelectGroupButtonsModal';
import RNSelectGroupButtonsModal from "react-native-select-group-buttons-modal";
export default class | extends React.Component {
_settingBuild = {
backdropColor: '#303437',
backdropOpacity: 0.2,
animationIn: 'slideInUp',
        animationTime: 400, // default animation duration in ms
        hideOnBack: true,
        modalTitle: '支付方式', // "Payment method"
        modalTips: '业务控制文字内容', // "Text content controlled by business logic"
        closeWithOutSideClick: true, // tapping the grey backdrop closes the modal
};
constructor(props) {
super(props);
this.state = {
selectorVisible: false,
selectText: ''
};
}
initData = () => {
let data = [];
data[0] = getDataMode(false, TYPE_CREDIT, 600);
data[1] = getDataMode(true, TYPE_CASH, 0, 500);
data[2] = getDataMode(false, TYPE_MIX, 100, 200);
return data;
};
render() {
return (
<SafeAreaView style={styles.container}>
<Text>Open up App.js to start working on your app!</Text>
<TouchableOpacity onPress={() => {
this.setState({
selectorVisible: !this.state.selectorVisible
})
}}>
<Text>press show modal</Text>
<Text>{this.state.selectText}</Text>
</TouchableOpacity>
<RNSelectGroupButtonsModal
settingBuild={this._settingBuild}
onPaymentModeChanged={(item, index) => {
this.setState({
selectText: JSON.stringify(item)
})
}}
data={this.initData()}
closeButtonPress={(flag) => {
this.setState({
selectorVisible: flag
})
}}
selectorVisible={this.state.selectorVisible}/>
</SafeAreaView>
);
}
}
const styles = StyleSheet.create({
container: {
flex: 1,
backgroundColor: '#fff',
},
});
| App |
input.rs | // Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT.
use std::fmt::Write;
/// See [`CreateParallelDataInput`](crate::input::CreateParallelDataInput)
pub mod create_parallel_data_input {
/// A builder for [`CreateParallelDataInput`](crate::input::CreateParallelDataInput)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) name: std::option::Option<std::string::String>,
pub(crate) description: std::option::Option<std::string::String>,
pub(crate) parallel_data_config: std::option::Option<crate::model::ParallelDataConfig>,
pub(crate) encryption_key: std::option::Option<crate::model::EncryptionKey>,
pub(crate) client_token: std::option::Option<std::string::String>,
}
impl Builder {
/// <p>A custom name for the parallel data resource in Amazon Translate. You must assign a name that is unique in the account and region.</p>
pub fn name(mut self, input: impl Into<std::string::String>) -> Self {
self.name = Some(input.into());
self
}
/// <p>A custom name for the parallel data resource in Amazon Translate. You must assign a name that is unique in the account and region.</p>
pub fn set_name(mut self, input: std::option::Option<std::string::String>) -> Self {
self.name = input;
self
}
/// <p>A custom description for the parallel data resource in Amazon Translate.</p>
pub fn description(mut self, input: impl Into<std::string::String>) -> Self {
self.description = Some(input.into());
self
}
/// <p>A custom description for the parallel data resource in Amazon Translate.</p>
pub fn set_description(mut self, input: std::option::Option<std::string::String>) -> Self {
self.description = input;
self
}
/// <p>Specifies the format and S3 location of the parallel data input file.</p>
pub fn parallel_data_config(mut self, input: crate::model::ParallelDataConfig) -> Self {
self.parallel_data_config = Some(input);
self
}
/// <p>Specifies the format and S3 location of the parallel data input file.</p>
pub fn set_parallel_data_config(
mut self,
input: std::option::Option<crate::model::ParallelDataConfig>,
) -> Self {
self.parallel_data_config = input;
self
}
/// <p>The encryption key used to encrypt this object.</p>
pub fn encryption_key(mut self, input: crate::model::EncryptionKey) -> Self {
self.encryption_key = Some(input);
self
}
/// <p>The encryption key used to encrypt this object.</p>
pub fn set_encryption_key(
mut self,
input: std::option::Option<crate::model::EncryptionKey>,
) -> Self {
self.encryption_key = input;
self
}
/// <p>A unique identifier for the request. This token is automatically generated when you use Amazon Translate through an AWS SDK.</p>
pub fn client_token(mut self, input: impl Into<std::string::String>) -> Self {
self.client_token = Some(input.into());
self
}
/// <p>A unique identifier for the request. This token is automatically generated when you use Amazon Translate through an AWS SDK.</p>
pub fn set_client_token(mut self, input: std::option::Option<std::string::String>) -> Self {
self.client_token = input;
self
}
/// Consumes the builder and constructs a [`CreateParallelDataInput`](crate::input::CreateParallelDataInput)
pub fn build(
self,
) -> std::result::Result<
crate::input::CreateParallelDataInput,
aws_smithy_http::operation::BuildError,
> {
Ok(crate::input::CreateParallelDataInput {
name: self.name,
description: self.description,
parallel_data_config: self.parallel_data_config,
encryption_key: self.encryption_key,
client_token: self.client_token,
})
}
}
}
#[doc(hidden)]
pub type CreateParallelDataInputOperationOutputAlias = crate::operation::CreateParallelData;
#[doc(hidden)]
pub type CreateParallelDataInputOperationRetryAlias = aws_http::retry::AwsErrorRetryPolicy;
impl CreateParallelDataInput {
/// Consumes the builder and constructs an Operation<[`CreateParallelData`](crate::operation::CreateParallelData)>
#[allow(unused_mut)]
#[allow(clippy::let_and_return)]
#[allow(clippy::needless_borrow)]
pub async fn make_operation(
mut self,
_config: &crate::config::Config,
) -> std::result::Result<
aws_smithy_http::operation::Operation<
crate::operation::CreateParallelData,
aws_http::retry::AwsErrorRetryPolicy,
>,
aws_smithy_http::operation::BuildError,
> {
if self.client_token.is_none() {
self.client_token = Some(_config.make_token.make_idempotency_token());
}
let mut request = {
fn uri_base(
_input: &crate::input::CreateParallelDataInput,
output: &mut String,
) -> Result<(), aws_smithy_http::operation::BuildError> {
write!(output, "/").expect("formatting should succeed");
Ok(())
}
#[allow(clippy::unnecessary_wraps)]
fn update_http_builder(
input: &crate::input::CreateParallelDataInput,
builder: http::request::Builder,
) -> std::result::Result<http::request::Builder, aws_smithy_http::operation::BuildError>
{
let mut uri = String::new();
uri_base(input, &mut uri)?;
Ok(builder.method("POST").uri(uri))
}
let mut builder = update_http_builder(&self, http::request::Builder::new())?;
builder = aws_smithy_http::header::set_request_header_if_absent(
builder,
http::header::CONTENT_TYPE,
"application/x-amz-json-1.1",
);
builder = aws_smithy_http::header::set_request_header_if_absent(
builder,
http::header::HeaderName::from_static("x-amz-target"),
"AWSShineFrontendService_20170701.CreateParallelData",
);
builder
};
let mut properties = aws_smithy_http::property_bag::SharedPropertyBag::new();
#[allow(clippy::useless_conversion)]
let body = aws_smithy_http::body::SdkBody::from(
crate::operation_ser::serialize_operation_crate_operation_create_parallel_data(&self)?,
);
if let Some(content_length) = body.content_length() {
request = aws_smithy_http::header::set_request_header_if_absent(
request,
http::header::CONTENT_LENGTH,
content_length,
);
}
let request = request.body(body).expect("should be valid request");
let mut request = aws_smithy_http::operation::Request::from_parts(request, properties);
let mut user_agent = aws_http::user_agent::AwsUserAgent::new_from_environment(
aws_types::os_shim_internal::Env::real(),
crate::API_METADATA.clone(),
);
if let Some(app_name) = _config.app_name() {
user_agent = user_agent.with_app_name(app_name.clone());
}
request.properties_mut().insert(user_agent);
let mut signing_config = aws_sig_auth::signer::OperationSigningConfig::default_config();
request.properties_mut().insert(signing_config);
request
.properties_mut()
.insert(aws_types::SigningService::from_static(
_config.signing_service(),
));
aws_endpoint::set_endpoint_resolver(
&mut request.properties_mut(),
_config.endpoint_resolver.clone(),
);
if let Some(region) = &_config.region {
request.properties_mut().insert(region.clone());
}
aws_http::auth::set_provider(
&mut request.properties_mut(),
_config.credentials_provider.clone(),
);
let op = aws_smithy_http::operation::Operation::new(
request,
crate::operation::CreateParallelData::new(),
)
.with_metadata(aws_smithy_http::operation::Metadata::new(
"CreateParallelData",
"translate",
));
let op = op.with_retry_policy(aws_http::retry::AwsErrorRetryPolicy::new());
Ok(op)
}
/// Creates a new builder-style object to manufacture [`CreateParallelDataInput`](crate::input::CreateParallelDataInput)
pub fn builder() -> crate::input::create_parallel_data_input::Builder {
crate::input::create_parallel_data_input::Builder::default()
}
}
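// Usage sketch (illustrative, not part of the generated code): build an input
// and turn it into a signed operation. `config` is assumed to be an
// already-constructed crate::config::Config, and the resource name is made up.
//
// let input = crate::input::CreateParallelDataInput::builder()
//     .name("example-parallel-data")
//     .parallel_data_config(parallel_data_config)
//     .build()?;
// let op = input.make_operation(&config).await?;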
/// See [`DeleteParallelDataInput`](crate::input::DeleteParallelDataInput)
pub mod delete_parallel_data_input {
/// A builder for [`DeleteParallelDataInput`](crate::input::DeleteParallelDataInput)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) name: std::option::Option<std::string::String>,
}
impl Builder {
/// <p>The name of the parallel data resource that is being deleted.</p>
pub fn name(mut self, input: impl Into<std::string::String>) -> Self {
self.name = Some(input.into());
self
}
/// <p>The name of the parallel data resource that is being deleted.</p>
pub fn set_name(mut self, input: std::option::Option<std::string::String>) -> Self {
self.name = input;
self
}
/// Consumes the builder and constructs a [`DeleteParallelDataInput`](crate::input::DeleteParallelDataInput)
pub fn build(
self,
) -> std::result::Result<
crate::input::DeleteParallelDataInput,
aws_smithy_http::operation::BuildError,
> {
Ok(crate::input::DeleteParallelDataInput { name: self.name })
}
}
}
#[doc(hidden)]
pub type DeleteParallelDataInputOperationOutputAlias = crate::operation::DeleteParallelData;
#[doc(hidden)]
pub type DeleteParallelDataInputOperationRetryAlias = aws_http::retry::AwsErrorRetryPolicy;
impl DeleteParallelDataInput {
/// Consumes the builder and constructs an Operation<[`DeleteParallelData`](crate::operation::DeleteParallelData)>
#[allow(unused_mut)]
#[allow(clippy::let_and_return)]
#[allow(clippy::needless_borrow)]
pub async fn make_operation(
&self,
_config: &crate::config::Config,
) -> std::result::Result<
aws_smithy_http::operation::Operation<
crate::operation::DeleteParallelData,
aws_http::retry::AwsErrorRetryPolicy,
>,
aws_smithy_http::operation::BuildError,
> {
let mut request = {
fn uri_base(
_input: &crate::input::DeleteParallelDataInput,
output: &mut String,
) -> Result<(), aws_smithy_http::operation::BuildError> {
write!(output, "/").expect("formatting should succeed");
Ok(())
}
#[allow(clippy::unnecessary_wraps)]
fn update_http_builder(
input: &crate::input::DeleteParallelDataInput,
builder: http::request::Builder,
) -> std::result::Result<http::request::Builder, aws_smithy_http::operation::BuildError>
{
let mut uri = String::new();
uri_base(input, &mut uri)?;
Ok(builder.method("POST").uri(uri))
}
let mut builder = update_http_builder(&self, http::request::Builder::new())?;
builder = aws_smithy_http::header::set_request_header_if_absent(
builder,
http::header::CONTENT_TYPE,
"application/x-amz-json-1.1",
);
builder = aws_smithy_http::header::set_request_header_if_absent(
builder,
http::header::HeaderName::from_static("x-amz-target"),
"AWSShineFrontendService_20170701.DeleteParallelData",
);
builder
};
let mut properties = aws_smithy_http::property_bag::SharedPropertyBag::new();
#[allow(clippy::useless_conversion)]
let body = aws_smithy_http::body::SdkBody::from(
crate::operation_ser::serialize_operation_crate_operation_delete_parallel_data(&self)?,
);
if let Some(content_length) = body.content_length() {
request = aws_smithy_http::header::set_request_header_if_absent(
request,
http::header::CONTENT_LENGTH,
content_length,
);
}
let request = request.body(body).expect("should be valid request");
let mut request = aws_smithy_http::operation::Request::from_parts(request, properties);
let mut user_agent = aws_http::user_agent::AwsUserAgent::new_from_environment(
aws_types::os_shim_internal::Env::real(),
crate::API_METADATA.clone(),
);
if let Some(app_name) = _config.app_name() {
user_agent = user_agent.with_app_name(app_name.clone());
}
request.properties_mut().insert(user_agent);
let mut signing_config = aws_sig_auth::signer::OperationSigningConfig::default_config();
request.properties_mut().insert(signing_config);
request
.properties_mut()
.insert(aws_types::SigningService::from_static(
_config.signing_service(),
));
aws_endpoint::set_endpoint_resolver(
&mut request.properties_mut(),
_config.endpoint_resolver.clone(),
);
if let Some(region) = &_config.region {
request.properties_mut().insert(region.clone());
}
aws_http::auth::set_provider(
&mut request.properties_mut(),
_config.credentials_provider.clone(),
);
let op = aws_smithy_http::operation::Operation::new(
request,
crate::operation::DeleteParallelData::new(),
)
.with_metadata(aws_smithy_http::operation::Metadata::new(
"DeleteParallelData",
"translate",
));
let op = op.with_retry_policy(aws_http::retry::AwsErrorRetryPolicy::new());
Ok(op)
}
/// Creates a new builder-style object to manufacture [`DeleteParallelDataInput`](crate::input::DeleteParallelDataInput)
pub fn builder() -> crate::input::delete_parallel_data_input::Builder {
crate::input::delete_parallel_data_input::Builder::default()
}
}
/// See [`DeleteTerminologyInput`](crate::input::DeleteTerminologyInput)
pub mod delete_terminology_input {
/// A builder for [`DeleteTerminologyInput`](crate::input::DeleteTerminologyInput)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) name: std::option::Option<std::string::String>,
}
impl Builder {
/// <p>The name of the custom terminology being deleted. </p>
pub fn name(mut self, input: impl Into<std::string::String>) -> Self {
self.name = Some(input.into());
self
}
/// <p>The name of the custom terminology being deleted. </p>
pub fn set_name(mut self, input: std::option::Option<std::string::String>) -> Self {
self.name = input;
self
}
/// Consumes the builder and constructs a [`DeleteTerminologyInput`](crate::input::DeleteTerminologyInput)
pub fn build(
self,
) -> std::result::Result<
crate::input::DeleteTerminologyInput,
aws_smithy_http::operation::BuildError,
> {
Ok(crate::input::DeleteTerminologyInput { name: self.name })
}
}
}
#[doc(hidden)]
pub type DeleteTerminologyInputOperationOutputAlias = crate::operation::DeleteTerminology;
#[doc(hidden)]
pub type DeleteTerminologyInputOperationRetryAlias = aws_http::retry::AwsErrorRetryPolicy;
impl DeleteTerminologyInput {
/// Consumes the builder and constructs an Operation<[`DeleteTerminology`](crate::operation::DeleteTerminology)>
#[allow(unused_mut)]
#[allow(clippy::let_and_return)]
#[allow(clippy::needless_borrow)]
pub async fn make_operation(
&self,
_config: &crate::config::Config,
) -> std::result::Result<
aws_smithy_http::operation::Operation<
crate::operation::DeleteTerminology,
aws_http::retry::AwsErrorRetryPolicy,
>,
aws_smithy_http::operation::BuildError,
> {
let mut request = {
fn uri_base(
_input: &crate::input::DeleteTerminologyInput,
output: &mut String,
) -> Result<(), aws_smithy_http::operation::BuildError> {
write!(output, "/").expect("formatting should succeed");
Ok(())
}
#[allow(clippy::unnecessary_wraps)]
fn update_http_builder(
input: &crate::input::DeleteTerminologyInput,
builder: http::request::Builder,
) -> std::result::Result<http::request::Builder, aws_smithy_http::operation::BuildError>
{
let mut uri = String::new();
uri_base(input, &mut uri)?;
Ok(builder.method("POST").uri(uri))
}
let mut builder = update_http_builder(&self, http::request::Builder::new())?;
builder = aws_smithy_http::header::set_request_header_if_absent(
builder,
http::header::CONTENT_TYPE,
"application/x-amz-json-1.1",
);
builder = aws_smithy_http::header::set_request_header_if_absent(
builder,
http::header::HeaderName::from_static("x-amz-target"),
"AWSShineFrontendService_20170701.DeleteTerminology",
);
builder
};
let mut properties = aws_smithy_http::property_bag::SharedPropertyBag::new();
#[allow(clippy::useless_conversion)]
let body = aws_smithy_http::body::SdkBody::from(
crate::operation_ser::serialize_operation_crate_operation_delete_terminology(&self)?,
);
if let Some(content_length) = body.content_length() {
request = aws_smithy_http::header::set_request_header_if_absent(
request,
http::header::CONTENT_LENGTH,
content_length,
);
}
let request = request.body(body).expect("should be valid request");
let mut request = aws_smithy_http::operation::Request::from_parts(request, properties);
let mut user_agent = aws_http::user_agent::AwsUserAgent::new_from_environment(
aws_types::os_shim_internal::Env::real(),
crate::API_METADATA.clone(),
);
if let Some(app_name) = _config.app_name() {
user_agent = user_agent.with_app_name(app_name.clone());
}
request.properties_mut().insert(user_agent);
let mut signing_config = aws_sig_auth::signer::OperationSigningConfig::default_config();
request.properties_mut().insert(signing_config);
request
.properties_mut()
.insert(aws_types::SigningService::from_static(
_config.signing_service(),
));
aws_endpoint::set_endpoint_resolver(
&mut request.properties_mut(),
_config.endpoint_resolver.clone(),
);
if let Some(region) = &_config.region {
request.properties_mut().insert(region.clone());
}
aws_http::auth::set_provider(
&mut request.properties_mut(),
_config.credentials_provider.clone(),
);
let op = aws_smithy_http::operation::Operation::new(
request,
crate::operation::DeleteTerminology::new(),
)
.with_metadata(aws_smithy_http::operation::Metadata::new(
"DeleteTerminology",
"translate",
));
let op = op.with_retry_policy(aws_http::retry::AwsErrorRetryPolicy::new());
Ok(op)
}
/// Creates a new builder-style object to manufacture [`DeleteTerminologyInput`](crate::input::DeleteTerminologyInput)
pub fn builder() -> crate::input::delete_terminology_input::Builder {
crate::input::delete_terminology_input::Builder::default()
}
}
/// See [`DescribeTextTranslationJobInput`](crate::input::DescribeTextTranslationJobInput)
pub mod describe_text_translation_job_input {
/// A builder for [`DescribeTextTranslationJobInput`](crate::input::DescribeTextTranslationJobInput)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) job_id: std::option::Option<std::string::String>,
}
impl Builder {
/// <p>The identifier that Amazon Translate generated for the job. The <code>StartTextTranslationJob</code> operation returns this identifier in its response.</p>
pub fn job_id(mut self, input: impl Into<std::string::String>) -> Self {
self.job_id = Some(input.into());
self
}
/// <p>The identifier that Amazon Translate generated for the job. The <code>StartTextTranslationJob</code> operation returns this identifier in its response.</p>
pub fn set_job_id(mut self, input: std::option::Option<std::string::String>) -> Self {
self.job_id = input;
self
}
/// Consumes the builder and constructs a [`DescribeTextTranslationJobInput`](crate::input::DescribeTextTranslationJobInput)
pub fn build(
self,
) -> std::result::Result<
crate::input::DescribeTextTranslationJobInput,
aws_smithy_http::operation::BuildError,
> {
Ok(crate::input::DescribeTextTranslationJobInput {
job_id: self.job_id,
})
}
}
}
#[doc(hidden)]
pub type DescribeTextTranslationJobInputOperationOutputAlias =
crate::operation::DescribeTextTranslationJob;
#[doc(hidden)]
pub type DescribeTextTranslationJobInputOperationRetryAlias = aws_http::retry::AwsErrorRetryPolicy;
impl DescribeTextTranslationJobInput {
/// Consumes the builder and constructs an Operation<[`DescribeTextTranslationJob`](crate::operation::DescribeTextTranslationJob)>
#[allow(unused_mut)]
#[allow(clippy::let_and_return)]
#[allow(clippy::needless_borrow)]
pub async fn make_operation(
&self,
_config: &crate::config::Config,
) -> std::result::Result<
aws_smithy_http::operation::Operation<
crate::operation::DescribeTextTranslationJob,
aws_http::retry::AwsErrorRetryPolicy,
>,
aws_smithy_http::operation::BuildError,
> {
let mut request = {
fn uri_base(
_input: &crate::input::DescribeTextTranslationJobInput,
output: &mut String,
) -> Result<(), aws_smithy_http::operation::BuildError> {
write!(output, "/").expect("formatting should succeed");
Ok(())
}
#[allow(clippy::unnecessary_wraps)]
fn update_http_builder(
input: &crate::input::DescribeTextTranslationJobInput,
builder: http::request::Builder,
) -> std::result::Result<http::request::Builder, aws_smithy_http::operation::BuildError>
{
let mut uri = String::new();
uri_base(input, &mut uri)?;
Ok(builder.method("POST").uri(uri))
}
let mut builder = update_http_builder(&self, http::request::Builder::new())?;
builder = aws_smithy_http::header::set_request_header_if_absent(
builder,
http::header::CONTENT_TYPE,
"application/x-amz-json-1.1",
);
builder = aws_smithy_http::header::set_request_header_if_absent(
builder,
http::header::HeaderName::from_static("x-amz-target"),
"AWSShineFrontendService_20170701.DescribeTextTranslationJob",
);
builder
};
let mut properties = aws_smithy_http::property_bag::SharedPropertyBag::new();
        #[allow(clippy::useless_conversion)]
        let body = aws_smithy_http::body::SdkBody::from(
            crate::operation_ser::serialize_operation_crate_operation_describe_text_translation_job(
                &self,
            )?,
        );
if let Some(content_length) = body.content_length() {
request = aws_smithy_http::header::set_request_header_if_absent(
request,
http::header::CONTENT_LENGTH,
content_length,
);
}
let request = request.body(body).expect("should be valid request");
let mut request = aws_smithy_http::operation::Request::from_parts(request, properties);
let mut user_agent = aws_http::user_agent::AwsUserAgent::new_from_environment(
aws_types::os_shim_internal::Env::real(),
crate::API_METADATA.clone(),
);
if let Some(app_name) = _config.app_name() {
user_agent = user_agent.with_app_name(app_name.clone());
}
request.properties_mut().insert(user_agent);
let mut signing_config = aws_sig_auth::signer::OperationSigningConfig::default_config();
request.properties_mut().insert(signing_config);
request
.properties_mut()
.insert(aws_types::SigningService::from_static(
_config.signing_service(),
));
aws_endpoint::set_endpoint_resolver(
&mut request.properties_mut(),
_config.endpoint_resolver.clone(),
);
if let Some(region) = &_config.region {
request.properties_mut().insert(region.clone());
}
aws_http::auth::set_provider(
&mut request.properties_mut(),
_config.credentials_provider.clone(),
);
let op = aws_smithy_http::operation::Operation::new(
request,
crate::operation::DescribeTextTranslationJob::new(),
)
.with_metadata(aws_smithy_http::operation::Metadata::new(
"DescribeTextTranslationJob",
"translate",
));
let op = op.with_retry_policy(aws_http::retry::AwsErrorRetryPolicy::new());
Ok(op)
}
/// Creates a new builder-style object to manufacture [`DescribeTextTranslationJobInput`](crate::input::DescribeTextTranslationJobInput)
pub fn builder() -> crate::input::describe_text_translation_job_input::Builder {
crate::input::describe_text_translation_job_input::Builder::default()
}
}
/// See [`GetParallelDataInput`](crate::input::GetParallelDataInput)
pub mod get_parallel_data_input {
/// A builder for [`GetParallelDataInput`](crate::input::GetParallelDataInput)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) name: std::option::Option<std::string::String>,
}
impl Builder {
/// <p>The name of the parallel data resource that is being retrieved.</p>
pub fn name(mut self, input: impl Into<std::string::String>) -> Self {
self.name = Some(input.into());
self
}
/// <p>The name of the parallel data resource that is being retrieved.</p>
pub fn set_name(mut self, input: std::option::Option<std::string::String>) -> Self {
self.name = input;
self
}
/// Consumes the builder and constructs a [`GetParallelDataInput`](crate::input::GetParallelDataInput)
pub fn build(
self,
) -> std::result::Result<
crate::input::GetParallelDataInput,
aws_smithy_http::operation::BuildError,
> {
Ok(crate::input::GetParallelDataInput { name: self.name })
}
}
}
#[doc(hidden)]
pub type GetParallelDataInputOperationOutputAlias = crate::operation::GetParallelData;
#[doc(hidden)]
pub type GetParallelDataInputOperationRetryAlias = aws_http::retry::AwsErrorRetryPolicy;
impl GetParallelDataInput {
/// Consumes the builder and constructs an Operation<[`GetParallelData`](crate::operation::GetParallelData)>
#[allow(unused_mut)]
#[allow(clippy::let_and_return)]
#[allow(clippy::needless_borrow)]
pub async fn make_operation(
&self,
_config: &crate::config::Config,
) -> std::result::Result<
aws_smithy_http::operation::Operation<
crate::operation::GetParallelData,
aws_http::retry::AwsErrorRetryPolicy,
>,
aws_smithy_http::operation::BuildError,
> {
let mut request = {
fn uri_base(
_input: &crate::input::GetParallelDataInput,
output: &mut String,
) -> Result<(), aws_smithy_http::operation::BuildError> {
write!(output, "/").expect("formatting should succeed");
Ok(())
}
#[allow(clippy::unnecessary_wraps)]
fn update_http_builder(
input: &crate::input::GetParallelDataInput,
builder: http::request::Builder,
) -> std::result::Result<http::request::Builder, aws_smithy_http::operation::BuildError>
{
let mut uri = String::new();
uri_base(input, &mut uri)?;
Ok(builder.method("POST").uri(uri))
}
let mut builder = update_http_builder(&self, http::request::Builder::new())?;
builder = aws_smithy_http::header::set_request_header_if_absent(
builder,
http::header::CONTENT_TYPE,
"application/x-amz-json-1.1",
);
builder = aws_smithy_http::header::set_request_header_if_absent(
builder,
http::header::HeaderName::from_static("x-amz-target"),
"AWSShineFrontendService_20170701.GetParallelData",
);
builder
};
let mut properties = aws_smithy_http::property_bag::SharedPropertyBag::new();
#[allow(clippy::useless_conversion)]
let body = aws_smithy_http::body::SdkBody::from(
crate::operation_ser::serialize_operation_crate_operation_get_parallel_data(&self)?,
);
if let Some(content_length) = body.content_length() {
request = aws_smithy_http::header::set_request_header_if_absent(
request,
http::header::CONTENT_LENGTH,
content_length,
);
}
let request = request.body(body).expect("should be valid request");
let mut request = aws_smithy_http::operation::Request::from_parts(request, properties);
let mut user_agent = aws_http::user_agent::AwsUserAgent::new_from_environment(
aws_types::os_shim_internal::Env::real(),
crate::API_METADATA.clone(),
);
if let Some(app_name) = _config.app_name() {
user_agent = user_agent.with_app_name(app_name.clone());
}
request.properties_mut().insert(user_agent);
let mut signing_config = aws_sig_auth::signer::OperationSigningConfig::default_config();
request.properties_mut().insert(signing_config);
request
.properties_mut()
.insert(aws_types::SigningService::from_static(
_config.signing_service(),
));
aws_endpoint::set_endpoint_resolver(
&mut request.properties_mut(),
_config.endpoint_resolver.clone(),
);
if let Some(region) = &_config.region {
request.properties_mut().insert(region.clone());
}
aws_http::auth::set_provider(
&mut request.properties_mut(),
_config.credentials_provider.clone(),
);
let op = aws_smithy_http::operation::Operation::new(
request,
crate::operation::GetParallelData::new(),
)
.with_metadata(aws_smithy_http::operation::Metadata::new(
"GetParallelData",
"translate",
));
let op = op.with_retry_policy(aws_http::retry::AwsErrorRetryPolicy::new());
Ok(op)
}
/// Creates a new builder-style object to manufacture [`GetParallelDataInput`](crate::input::GetParallelDataInput)
pub fn builder() -> crate::input::get_parallel_data_input::Builder {
crate::input::get_parallel_data_input::Builder::default()
}
}
/// See [`GetTerminologyInput`](crate::input::GetTerminologyInput)
pub mod get_terminology_input {
/// A builder for [`GetTerminologyInput`](crate::input::GetTerminologyInput)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) name: std::option::Option<std::string::String>,
pub(crate) terminology_data_format:
std::option::Option<crate::model::TerminologyDataFormat>,
}
impl Builder {
/// <p>The name of the custom terminology being retrieved.</p>
pub fn name(mut self, input: impl Into<std::string::String>) -> Self {
self.name = Some(input.into());
self
}
/// <p>The name of the custom terminology being retrieved.</p>
pub fn set_name(mut self, input: std::option::Option<std::string::String>) -> Self {
self.name = input;
self
}
/// <p>The data format of the custom terminology being retrieved.</p>
/// <p>If you don't specify this parameter, Amazon Translate returns a file that has the same format as the file that was imported to create the terminology. </p>
/// <p>If you specify this parameter when you retrieve a multi-directional terminology resource, you must specify the same format as that of the input file that was imported to create it. Otherwise, Amazon Translate throws an error.</p>
pub fn terminology_data_format(
mut self,
input: crate::model::TerminologyDataFormat,
) -> Self {
self.terminology_data_format = Some(input);
self
}
/// <p>The data format of the custom terminology being retrieved.</p>
/// <p>If you don't specify this parameter, Amazon Translate returns a file that has the same format as the file that was imported to create the terminology. </p>
/// <p>If you specify this parameter when you retrieve a multi-directional terminology resource, you must specify the same format as that of the input file that was imported to create it. Otherwise, Amazon Translate throws an error.</p>
pub fn set_terminology_data_format(
mut self,
input: std::option::Option<crate::model::TerminologyDataFormat>,
) -> Self {
self.terminology_data_format = input;
self
}
/// Consumes the builder and constructs a [`GetTerminologyInput`](crate::input::GetTerminologyInput)
pub fn build(
self,
) -> std::result::Result<
crate::input::GetTerminologyInput,
aws_smithy_http::operation::BuildError,
> {
Ok(crate::input::GetTerminologyInput {
name: self.name,
terminology_data_format: self.terminology_data_format,
})
}
}
}
#[doc(hidden)]
pub type GetTerminologyInputOperationOutputAlias = crate::operation::GetTerminology;
#[doc(hidden)]
pub type GetTerminologyInputOperationRetryAlias = aws_http::retry::AwsErrorRetryPolicy;
impl GetTerminologyInput {
/// Consumes the builder and constructs an Operation<[`GetTerminology`](crate::operation::GetTerminology)>
#[allow(unused_mut)]
#[allow(clippy::let_and_return)]
#[allow(clippy::needless_borrow)]
pub async fn make_operation(
&self,
_config: &crate::config::Config,
) -> std::result::Result<
aws_smithy_http::operation::Operation<
crate::operation::GetTerminology,
aws_http::retry::AwsErrorRetryPolicy,
>,
aws_smithy_http::operation::BuildError,
> {
let mut request = {
fn uri_base(
_input: &crate::input::GetTerminologyInput,
output: &mut String,
) -> Result<(), aws_smithy_http::operation::BuildError> {
write!(output, "/").expect("formatting should succeed");
Ok(())
}
#[allow(clippy::unnecessary_wraps)]
fn update_http_builder(
input: &crate::input::GetTerminologyInput,
builder: http::request::Builder,
) -> std::result::Result<http::request::Builder, aws_smithy_http::operation::BuildError>
{
let mut uri = String::new();
uri_base(input, &mut uri)?;
Ok(builder.method("POST").uri(uri))
}
let mut builder = update_http_builder(&self, http::request::Builder::new())?;
builder = aws_smithy_http::header::set_request_header_if_absent(
builder,
http::header::CONTENT_TYPE,
"application/x-amz-json-1.1",
);
builder = aws_smithy_http::header::set_request_header_if_absent(
builder,
http::header::HeaderName::from_static("x-amz-target"),
"AWSShineFrontendService_20170701.GetTerminology",
);
builder
};
let mut properties = aws_smithy_http::property_bag::SharedPropertyBag::new();
#[allow(clippy::useless_conversion)]
let body = aws_smithy_http::body::SdkBody::from(
crate::operation_ser::serialize_operation_crate_operation_get_terminology(&self)?,
);
if let Some(content_length) = body.content_length() {
request = aws_smithy_http::header::set_request_header_if_absent(
request,
http::header::CONTENT_LENGTH,
content_length,
);
}
let request = request.body(body).expect("should be valid request");
let mut request = aws_smithy_http::operation::Request::from_parts(request, properties);
let mut user_agent = aws_http::user_agent::AwsUserAgent::new_from_environment(
aws_types::os_shim_internal::Env::real(),
crate::API_METADATA.clone(),
);
if let Some(app_name) = _config.app_name() {
user_agent = user_agent.with_app_name(app_name.clone());
}
request.properties_mut().insert(user_agent);
let mut signing_config = aws_sig_auth::signer::OperationSigningConfig::default_config();
request.properties_mut().insert(signing_config);
request
.properties_mut()
.insert(aws_types::SigningService::from_static(
_config.signing_service(),
));
aws_endpoint::set_endpoint_resolver(
&mut request.properties_mut(),
_config.endpoint_resolver.clone(),
);
if let Some(region) = &_config.region {
request.properties_mut().insert(region.clone());
}
aws_http::auth::set_provider(
&mut request.properties_mut(),
_config.credentials_provider.clone(),
);
let op = aws_smithy_http::operation::Operation::new(
request,
crate::operation::GetTerminology::new(),
)
.with_metadata(aws_smithy_http::operation::Metadata::new(
"GetTerminology",
"translate",
));
let op = op.with_retry_policy(aws_http::retry::AwsErrorRetryPolicy::new());
Ok(op)
}
/// Creates a new builder-style object to manufacture [`GetTerminologyInput`](crate::input::GetTerminologyInput)
pub fn builder() -> crate::input::get_terminology_input::Builder {
crate::input::get_terminology_input::Builder::default()
}
}
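// Usage sketch (illustrative): retrieve a terminology in an explicit format.
// As the docs above note, omitting `terminology_data_format` returns the same
// format the terminology was imported with; `format` is assumed to be a
// crate::model::TerminologyDataFormat value.
//
// let input = crate::input::GetTerminologyInput::builder()
//     .name("example-terminology")
//     .terminology_data_format(format)
//     .build()?;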
/// See [`ImportTerminologyInput`](crate::input::ImportTerminologyInput)
pub mod import_terminology_input {
/// A builder for [`ImportTerminologyInput`](crate::input::ImportTerminologyInput)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) name: std::option::Option<std::string::String>,
pub(crate) merge_strategy: std::option::Option<crate::model::MergeStrategy>,
pub(crate) description: std::option::Option<std::string::String>,
pub(crate) terminology_data: std::option::Option<crate::model::TerminologyData>,
pub(crate) encryption_key: std::option::Option<crate::model::EncryptionKey>,
}
impl Builder {
/// <p>The name of the custom terminology being imported.</p>
pub fn name(mut self, input: impl Into<std::string::String>) -> Self {
self.name = Some(input.into());
self
}
/// <p>The name of the custom terminology being imported.</p>
pub fn set_name(mut self, input: std::option::Option<std::string::String>) -> Self {
self.name = input;
self
}
/// <p>The merge strategy of the custom terminology being imported. Currently, only the OVERWRITE merge strategy is supported. In this case, the imported terminology will overwrite an existing terminology of the same name.</p>
pub fn merge_strategy(mut self, input: crate::model::MergeStrategy) -> Self {
self.merge_strategy = Some(input);
self
}
/// <p>The merge strategy of the custom terminology being imported. Currently, only the OVERWRITE merge strategy is supported. In this case, the imported terminology will overwrite an existing terminology of the same name.</p>
pub fn set_merge_strategy(
mut self,
input: std::option::Option<crate::model::MergeStrategy>,
) -> Self {
self.merge_strategy = input;
self | }
/// <p>The description of the custom terminology being imported.</p>
pub fn description(mut self, input: impl Into<std::string::String>) -> Self {
self.description = Some(input.into());
self
}
/// <p>The description of the custom terminology being imported.</p>
pub fn set_description(mut self, input: std::option::Option<std::string::String>) -> Self {
self.description = input;
self
}
/// <p>The terminology data for the custom terminology being imported.</p>
pub fn terminology_data(mut self, input: crate::model::TerminologyData) -> Self {
self.terminology_data = Some(input);
self
}
/// <p>The terminology data for the custom terminology being imported.</p>
pub fn set_terminology_data(
mut self,
input: std::option::Option<crate::model::TerminologyData>,
) -> Self {
self.terminology_data = input;
self
}
/// <p>The encryption key for the custom terminology being imported.</p>
pub fn encryption_key(mut self, input: crate::model::EncryptionKey) -> Self {
self.encryption_key = Some(input);
self
}
/// <p>The encryption key for the custom terminology being imported.</p>
pub fn set_encryption_key(
mut self,
input: std::option::Option<crate::model::EncryptionKey>,
) -> Self {
self.encryption_key = input;
self
}
/// Consumes the builder and constructs a [`ImportTerminologyInput`](crate::input::ImportTerminologyInput)
pub fn build(
self,
) -> std::result::Result<
crate::input::ImportTerminologyInput,
aws_smithy_http::operation::BuildError,
> {
Ok(crate::input::ImportTerminologyInput {
name: self.name,
merge_strategy: self.merge_strategy,
description: self.description,
terminology_data: self.terminology_data,
encryption_key: self.encryption_key,
})
}
}
}
#[doc(hidden)]
pub type ImportTerminologyInputOperationOutputAlias = crate::operation::ImportTerminology;
#[doc(hidden)]
pub type ImportTerminologyInputOperationRetryAlias = aws_http::retry::AwsErrorRetryPolicy;
impl ImportTerminologyInput {
/// Consumes the builder and constructs an Operation<[`ImportTerminology`](crate::operation::ImportTerminology)>
#[allow(unused_mut)]
#[allow(clippy::let_and_return)]
#[allow(clippy::needless_borrow)]
pub async fn make_operation(
&self,
_config: &crate::config::Config,
) -> std::result::Result<
aws_smithy_http::operation::Operation<
crate::operation::ImportTerminology,
aws_http::retry::AwsErrorRetryPolicy,
>,
aws_smithy_http::operation::BuildError,
> {
let mut request = {
fn uri_base(
_input: &crate::input::ImportTerminologyInput,
output: &mut String,
) -> Result<(), aws_smithy_http::operation::BuildError> {
write!(output, "/").expect("formatting should succeed");
Ok(())
}
#[allow(clippy::unnecessary_wraps)]
fn update_http_builder(
input: &crate::input::ImportTerminologyInput,
builder: http::request::Builder,
) -> std::result::Result<http::request::Builder, aws_smithy_http::operation::BuildError>
{
let mut uri = String::new();
uri_base(input, &mut uri)?;
Ok(builder.method("POST").uri(uri))
}
let mut builder = update_http_builder(&self, http::request::Builder::new())?;
builder = aws_smithy_http::header::set_request_header_if_absent(
builder,
http::header::CONTENT_TYPE,
"application/x-amz-json-1.1",
);
builder = aws_smithy_http::header::set_request_header_if_absent(
builder,
http::header::HeaderName::from_static("x-amz-target"),
"AWSShineFrontendService_20170701.ImportTerminology",
);
builder
};
let mut properties = aws_smithy_http::property_bag::SharedPropertyBag::new();
#[allow(clippy::useless_conversion)]
let body = aws_smithy_http::body::SdkBody::from(
crate::operation_ser::serialize_operation_crate_operation_import_terminology(&self)?,
);
if let Some(content_length) = body.content_length() {
request = aws_smithy_http::header::set_request_header_if_absent(
request,
http::header::CONTENT_LENGTH,
content_length,
);
}
let request = request.body(body).expect("should be valid request");
let mut request = aws_smithy_http::operation::Request::from_parts(request, properties);
let mut user_agent = aws_http::user_agent::AwsUserAgent::new_from_environment(
aws_types::os_shim_internal::Env::real(),
crate::API_METADATA.clone(),
);
if let Some(app_name) = _config.app_name() {
user_agent = user_agent.with_app_name(app_name.clone());
}
request.properties_mut().insert(user_agent);
let mut signing_config = aws_sig_auth::signer::OperationSigningConfig::default_config();
request.properties_mut().insert(signing_config);
request
.properties_mut()
.insert(aws_types::SigningService::from_static(
_config.signing_service(),
));
aws_endpoint::set_endpoint_resolver(
&mut request.properties_mut(),
_config.endpoint_resolver.clone(),
);
if let Some(region) = &_config.region {
request.properties_mut().insert(region.clone());
}
aws_http::auth::set_provider(
&mut request.properties_mut(),
_config.credentials_provider.clone(),
);
let op = aws_smithy_http::operation::Operation::new(
request,
crate::operation::ImportTerminology::new(),
)
.with_metadata(aws_smithy_http::operation::Metadata::new(
"ImportTerminology",
"translate",
));
let op = op.with_retry_policy(aws_http::retry::AwsErrorRetryPolicy::new());
Ok(op)
}
/// Creates a new builder-style object to manufacture [`ImportTerminologyInput`](crate::input::ImportTerminologyInput)
pub fn builder() -> crate::input::import_terminology_input::Builder {
crate::input::import_terminology_input::Builder::default()
}
}
/// See [`ListParallelDataInput`](crate::input::ListParallelDataInput)
pub mod list_parallel_data_input {
/// A builder for [`ListParallelDataInput`](crate::input::ListParallelDataInput)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) next_token: std::option::Option<std::string::String>,
pub(crate) max_results: std::option::Option<i32>,
}
impl Builder {
/// <p>A string that specifies the next page of results to return in a paginated response.</p>
pub fn next_token(mut self, input: impl Into<std::string::String>) -> Self {
self.next_token = Some(input.into());
self
}
/// <p>A string that specifies the next page of results to return in a paginated response.</p>
pub fn set_next_token(mut self, input: std::option::Option<std::string::String>) -> Self {
self.next_token = input;
self
}
/// <p>The maximum number of parallel data resources returned for each request.</p>
pub fn max_results(mut self, input: i32) -> Self {
self.max_results = Some(input);
self
}
/// <p>The maximum number of parallel data resources returned for each request.</p>
pub fn set_max_results(mut self, input: std::option::Option<i32>) -> Self {
self.max_results = input;
self
}
/// Consumes the builder and constructs a [`ListParallelDataInput`](crate::input::ListParallelDataInput)
pub fn build(
self,
) -> std::result::Result<
crate::input::ListParallelDataInput,
aws_smithy_http::operation::BuildError,
> {
Ok(crate::input::ListParallelDataInput {
next_token: self.next_token,
max_results: self.max_results,
})
}
}
}
#[doc(hidden)]
pub type ListParallelDataInputOperationOutputAlias = crate::operation::ListParallelData;
#[doc(hidden)]
pub type ListParallelDataInputOperationRetryAlias = aws_http::retry::AwsErrorRetryPolicy;
impl ListParallelDataInput {
/// Consumes the builder and constructs an Operation<[`ListParallelData`](crate::operation::ListParallelData)>
#[allow(unused_mut)]
#[allow(clippy::let_and_return)]
#[allow(clippy::needless_borrow)]
pub async fn make_operation(
&self,
_config: &crate::config::Config,
) -> std::result::Result<
aws_smithy_http::operation::Operation<
crate::operation::ListParallelData,
aws_http::retry::AwsErrorRetryPolicy,
>,
aws_smithy_http::operation::BuildError,
> {
let mut request = {
fn uri_base(
_input: &crate::input::ListParallelDataInput,
output: &mut String,
) -> Result<(), aws_smithy_http::operation::BuildError> {
write!(output, "/").expect("formatting should succeed");
Ok(())
}
#[allow(clippy::unnecessary_wraps)]
fn update_http_builder(
input: &crate::input::ListParallelDataInput,
builder: http::request::Builder,
) -> std::result::Result<http::request::Builder, aws_smithy_http::operation::BuildError>
{
let mut uri = String::new();
uri_base(input, &mut uri)?;
Ok(builder.method("POST").uri(uri))
}
let mut builder = update_http_builder(&self, http::request::Builder::new())?;
builder = aws_smithy_http::header::set_request_header_if_absent(
builder,
http::header::CONTENT_TYPE,
"application/x-amz-json-1.1",
);
builder = aws_smithy_http::header::set_request_header_if_absent(
builder,
http::header::HeaderName::from_static("x-amz-target"),
"AWSShineFrontendService_20170701.ListParallelData",
);
builder
};
let mut properties = aws_smithy_http::property_bag::SharedPropertyBag::new();
#[allow(clippy::useless_conversion)]
let body = aws_smithy_http::body::SdkBody::from(
crate::operation_ser::serialize_operation_crate_operation_list_parallel_data(&self)?,
);
if let Some(content_length) = body.content_length() {
request = aws_smithy_http::header::set_request_header_if_absent(
request,
http::header::CONTENT_LENGTH,
content_length,
);
}
let request = request.body(body).expect("should be valid request");
let mut request = aws_smithy_http::operation::Request::from_parts(request, properties);
let mut user_agent = aws_http::user_agent::AwsUserAgent::new_from_environment(
aws_types::os_shim_internal::Env::real(),
crate::API_METADATA.clone(),
);
if let Some(app_name) = _config.app_name() {
user_agent = user_agent.with_app_name(app_name.clone());
}
request.properties_mut().insert(user_agent);
let mut signing_config = aws_sig_auth::signer::OperationSigningConfig::default_config();
request.properties_mut().insert(signing_config);
request
.properties_mut()
.insert(aws_types::SigningService::from_static(
_config.signing_service(),
));
aws_endpoint::set_endpoint_resolver(
&mut request.properties_mut(),
_config.endpoint_resolver.clone(),
);
if let Some(region) = &_config.region {
request.properties_mut().insert(region.clone());
}
aws_http::auth::set_provider(
&mut request.properties_mut(),
_config.credentials_provider.clone(),
);
let op = aws_smithy_http::operation::Operation::new(
request,
crate::operation::ListParallelData::new(),
)
.with_metadata(aws_smithy_http::operation::Metadata::new(
"ListParallelData",
"translate",
));
let op = op.with_retry_policy(aws_http::retry::AwsErrorRetryPolicy::new());
Ok(op)
}
/// Creates a new builder-style object to manufacture [`ListParallelDataInput`](crate::input::ListParallelDataInput)
pub fn builder() -> crate::input::list_parallel_data_input::Builder {
crate::input::list_parallel_data_input::Builder::default()
}
}
/// See [`ListTerminologiesInput`](crate::input::ListTerminologiesInput)
pub mod list_terminologies_input {
/// A builder for [`ListTerminologiesInput`](crate::input::ListTerminologiesInput)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) next_token: std::option::Option<std::string::String>,
pub(crate) max_results: std::option::Option<i32>,
}
impl Builder {
/// <p>If the result of the request to ListTerminologies was truncated, include the NextToken to fetch the next group of custom terminologies. </p>
pub fn next_token(mut self, input: impl Into<std::string::String>) -> Self {
self.next_token = Some(input.into());
self
}
/// <p>If the result of the request to ListTerminologies was truncated, include the NextToken to fetch the next group of custom terminologies. </p>
pub fn set_next_token(mut self, input: std::option::Option<std::string::String>) -> Self {
self.next_token = input;
self
}
/// <p>The maximum number of custom terminologies returned per list request.</p>
pub fn max_results(mut self, input: i32) -> Self {
self.max_results = Some(input);
self
}
/// <p>The maximum number of custom terminologies returned per list request.</p>
pub fn set_max_results(mut self, input: std::option::Option<i32>) -> Self {
self.max_results = input;
self
}
/// Consumes the builder and constructs a [`ListTerminologiesInput`](crate::input::ListTerminologiesInput)
pub fn build(
self,
) -> std::result::Result<
crate::input::ListTerminologiesInput,
aws_smithy_http::operation::BuildError,
> {
Ok(crate::input::ListTerminologiesInput {
next_token: self.next_token,
max_results: self.max_results,
})
}
}
}
#[doc(hidden)]
pub type ListTerminologiesInputOperationOutputAlias = crate::operation::ListTerminologies;
#[doc(hidden)]
pub type ListTerminologiesInputOperationRetryAlias = aws_http::retry::AwsErrorRetryPolicy;
impl ListTerminologiesInput {
/// Consumes the builder and constructs an Operation<[`ListTerminologies`](crate::operation::ListTerminologies)>
#[allow(unused_mut)]
#[allow(clippy::let_and_return)]
#[allow(clippy::needless_borrow)]
pub async fn make_operation(
&self,
_config: &crate::config::Config,
) -> std::result::Result<
aws_smithy_http::operation::Operation<
crate::operation::ListTerminologies,
aws_http::retry::AwsErrorRetryPolicy,
>,
aws_smithy_http::operation::BuildError,
> {
let mut request = {
fn uri_base(
_input: &crate::input::ListTerminologiesInput,
output: &mut String,
) -> Result<(), aws_smithy_http::operation::BuildError> {
write!(output, "/").expect("formatting should succeed");
Ok(())
}
#[allow(clippy::unnecessary_wraps)]
fn update_http_builder(
input: &crate::input::ListTerminologiesInput,
builder: http::request::Builder,
) -> std::result::Result<http::request::Builder, aws_smithy_http::operation::BuildError>
{
let mut uri = String::new();
uri_base(input, &mut uri)?;
Ok(builder.method("POST").uri(uri))
}
let mut builder = update_http_builder(&self, http::request::Builder::new())?;
builder = aws_smithy_http::header::set_request_header_if_absent(
builder,
http::header::CONTENT_TYPE,
"application/x-amz-json-1.1",
);
builder = aws_smithy_http::header::set_request_header_if_absent(
builder,
http::header::HeaderName::from_static("x-amz-target"),
"AWSShineFrontendService_20170701.ListTerminologies",
);
builder
};
let mut properties = aws_smithy_http::property_bag::SharedPropertyBag::new();
#[allow(clippy::useless_conversion)]
let body = aws_smithy_http::body::SdkBody::from(
crate::operation_ser::serialize_operation_crate_operation_list_terminologies(&self)?,
);
if let Some(content_length) = body.content_length() {
request = aws_smithy_http::header::set_request_header_if_absent(
request,
http::header::CONTENT_LENGTH,
content_length,
);
}
let request = request.body(body).expect("should be valid request");
let mut request = aws_smithy_http::operation::Request::from_parts(request, properties);
let mut user_agent = aws_http::user_agent::AwsUserAgent::new_from_environment(
aws_types::os_shim_internal::Env::real(),
crate::API_METADATA.clone(),
);
if let Some(app_name) = _config.app_name() {
user_agent = user_agent.with_app_name(app_name.clone());
}
request.properties_mut().insert(user_agent);
let mut signing_config = aws_sig_auth::signer::OperationSigningConfig::default_config();
request.properties_mut().insert(signing_config);
request
.properties_mut()
.insert(aws_types::SigningService::from_static(
_config.signing_service(),
));
aws_endpoint::set_endpoint_resolver(
&mut request.properties_mut(),
_config.endpoint_resolver.clone(),
);
if let Some(region) = &_config.region {
request.properties_mut().insert(region.clone());
}
aws_http::auth::set_provider(
&mut request.properties_mut(),
_config.credentials_provider.clone(),
);
let op = aws_smithy_http::operation::Operation::new(
request,
crate::operation::ListTerminologies::new(),
)
.with_metadata(aws_smithy_http::operation::Metadata::new(
"ListTerminologies",
"translate",
));
let op = op.with_retry_policy(aws_http::retry::AwsErrorRetryPolicy::new());
Ok(op)
}
/// Creates a new builder-style object to manufacture [`ListTerminologiesInput`](crate::input::ListTerminologiesInput)
pub fn builder() -> crate::input::list_terminologies_input::Builder {
crate::input::list_terminologies_input::Builder::default()
}
}
/// See [`ListTextTranslationJobsInput`](crate::input::ListTextTranslationJobsInput)
pub mod list_text_translation_jobs_input {
/// A builder for [`ListTextTranslationJobsInput`](crate::input::ListTextTranslationJobsInput)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) filter: std::option::Option<crate::model::TextTranslationJobFilter>,
pub(crate) next_token: std::option::Option<std::string::String>,
pub(crate) max_results: std::option::Option<i32>,
}
impl Builder {
/// <p>The parameters that specify which batch translation jobs to retrieve. Filters include job name, job status, and submission time. You can only set one filter at a time.</p>
pub fn filter(mut self, input: crate::model::TextTranslationJobFilter) -> Self {
self.filter = Some(input);
self
}
/// <p>The parameters that specify which batch translation jobs to retrieve. Filters include job name, job status, and submission time. You can only set one filter at a time.</p>
pub fn set_filter(
mut self,
input: std::option::Option<crate::model::TextTranslationJobFilter>,
) -> Self {
self.filter = input;
self
}
/// <p>The token to request the next page of results.</p>
pub fn next_token(mut self, input: impl Into<std::string::String>) -> Self {
self.next_token = Some(input.into());
self
}
/// <p>The token to request the next page of results.</p>
pub fn set_next_token(mut self, input: std::option::Option<std::string::String>) -> Self {
self.next_token = input;
self
}
/// <p>The maximum number of results to return in each page. The default value is 100.</p>
pub fn max_results(mut self, input: i32) -> Self {
self.max_results = Some(input);
self
}
/// <p>The maximum number of results to return in each page. The default value is 100.</p>
pub fn set_max_results(mut self, input: std::option::Option<i32>) -> Self {
self.max_results = input;
self
}
/// Consumes the builder and constructs a [`ListTextTranslationJobsInput`](crate::input::ListTextTranslationJobsInput)
pub fn build(
self,
) -> std::result::Result<
crate::input::ListTextTranslationJobsInput,
aws_smithy_http::operation::BuildError,
> {
Ok(crate::input::ListTextTranslationJobsInput {
filter: self.filter,
next_token: self.next_token,
max_results: self.max_results,
})
}
}
}
#[doc(hidden)]
pub type ListTextTranslationJobsInputOperationOutputAlias =
crate::operation::ListTextTranslationJobs;
#[doc(hidden)]
pub type ListTextTranslationJobsInputOperationRetryAlias = aws_http::retry::AwsErrorRetryPolicy;
impl ListTextTranslationJobsInput {
/// Consumes the builder and constructs an Operation<[`ListTextTranslationJobs`](crate::operation::ListTextTranslationJobs)>
#[allow(unused_mut)]
#[allow(clippy::let_and_return)]
#[allow(clippy::needless_borrow)]
pub async fn make_operation(
&self,
_config: &crate::config::Config,
) -> std::result::Result<
aws_smithy_http::operation::Operation<
crate::operation::ListTextTranslationJobs,
aws_http::retry::AwsErrorRetryPolicy,
>,
aws_smithy_http::operation::BuildError,
> {
let mut request = {
fn uri_base(
_input: &crate::input::ListTextTranslationJobsInput,
output: &mut String,
) -> Result<(), aws_smithy_http::operation::BuildError> {
write!(output, "/").expect("formatting should succeed");
Ok(())
}
#[allow(clippy::unnecessary_wraps)]
fn update_http_builder(
input: &crate::input::ListTextTranslationJobsInput,
builder: http::request::Builder,
) -> std::result::Result<http::request::Builder, aws_smithy_http::operation::BuildError>
{
let mut uri = String::new();
uri_base(input, &mut uri)?;
Ok(builder.method("POST").uri(uri))
}
let mut builder = update_http_builder(&self, http::request::Builder::new())?;
builder = aws_smithy_http::header::set_request_header_if_absent(
builder,
http::header::CONTENT_TYPE,
"application/x-amz-json-1.1",
);
builder = aws_smithy_http::header::set_request_header_if_absent(
builder,
http::header::HeaderName::from_static("x-amz-target"),
"AWSShineFrontendService_20170701.ListTextTranslationJobs",
);
builder
};
let mut properties = aws_smithy_http::property_bag::SharedPropertyBag::new();
#[allow(clippy::useless_conversion)]
let body = aws_smithy_http::body::SdkBody::from(
crate::operation_ser::serialize_operation_crate_operation_list_text_translation_jobs(
&self,
)?,
);
if let Some(content_length) = body.content_length() {
request = aws_smithy_http::header::set_request_header_if_absent(
request,
http::header::CONTENT_LENGTH,
content_length,
);
}
let request = request.body(body).expect("should be valid request");
let mut request = aws_smithy_http::operation::Request::from_parts(request, properties);
let mut user_agent = aws_http::user_agent::AwsUserAgent::new_from_environment(
aws_types::os_shim_internal::Env::real(),
crate::API_METADATA.clone(),
);
if let Some(app_name) = _config.app_name() {
user_agent = user_agent.with_app_name(app_name.clone());
}
request.properties_mut().insert(user_agent);
let mut signing_config = aws_sig_auth::signer::OperationSigningConfig::default_config();
request.properties_mut().insert(signing_config);
request
.properties_mut()
.insert(aws_types::SigningService::from_static(
_config.signing_service(),
));
aws_endpoint::set_endpoint_resolver(
&mut request.properties_mut(),
_config.endpoint_resolver.clone(),
);
if let Some(region) = &_config.region {
request.properties_mut().insert(region.clone());
}
aws_http::auth::set_provider(
&mut request.properties_mut(),
_config.credentials_provider.clone(),
);
let op = aws_smithy_http::operation::Operation::new(
request,
crate::operation::ListTextTranslationJobs::new(),
)
.with_metadata(aws_smithy_http::operation::Metadata::new(
"ListTextTranslationJobs",
"translate",
));
let op = op.with_retry_policy(aws_http::retry::AwsErrorRetryPolicy::new());
Ok(op)
}
/// Creates a new builder-style object to manufacture [`ListTextTranslationJobsInput`](crate::input::ListTextTranslationJobsInput)
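    ///
    /// A minimal construction sketch (the page size is illustrative; this
    /// assumes the crate is consumed as `aws_sdk_translate`):
    /// ```no_run
    /// let input = aws_sdk_translate::input::ListTextTranslationJobsInput::builder()
    ///     .max_results(25)
    ///     .build()
    ///     .expect("all fields are optional, so build cannot fail");
    /// ```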
pub fn builder() -> crate::input::list_text_translation_jobs_input::Builder {
crate::input::list_text_translation_jobs_input::Builder::default()
}
}
/// See [`StartTextTranslationJobInput`](crate::input::StartTextTranslationJobInput)
pub mod start_text_translation_job_input {
/// A builder for [`StartTextTranslationJobInput`](crate::input::StartTextTranslationJobInput)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) job_name: std::option::Option<std::string::String>,
pub(crate) input_data_config: std::option::Option<crate::model::InputDataConfig>,
pub(crate) output_data_config: std::option::Option<crate::model::OutputDataConfig>,
pub(crate) data_access_role_arn: std::option::Option<std::string::String>,
pub(crate) source_language_code: std::option::Option<std::string::String>,
pub(crate) target_language_codes: std::option::Option<std::vec::Vec<std::string::String>>,
pub(crate) terminology_names: std::option::Option<std::vec::Vec<std::string::String>>,
pub(crate) parallel_data_names: std::option::Option<std::vec::Vec<std::string::String>>,
pub(crate) client_token: std::option::Option<std::string::String>,
pub(crate) settings: std::option::Option<crate::model::TranslationSettings>,
}
impl Builder {
/// <p>The name of the batch translation job to be performed.</p>
pub fn job_name(mut self, input: impl Into<std::string::String>) -> Self {
self.job_name = Some(input.into());
self
}
/// <p>The name of the batch translation job to be performed.</p>
pub fn set_job_name(mut self, input: std::option::Option<std::string::String>) -> Self {
self.job_name = input;
self
}
/// <p>Specifies the format and S3 location of the input documents for the translation job.</p>
pub fn input_data_config(mut self, input: crate::model::InputDataConfig) -> Self {
self.input_data_config = Some(input);
self
}
/// <p>Specifies the format and S3 location of the input documents for the translation job.</p>
pub fn set_input_data_config(
mut self,
input: std::option::Option<crate::model::InputDataConfig>,
) -> Self {
self.input_data_config = input;
self
}
/// <p>Specifies the S3 folder to which your job output will be saved. </p>
pub fn output_data_config(mut self, input: crate::model::OutputDataConfig) -> Self {
self.output_data_config = Some(input);
self
}
/// <p>Specifies the S3 folder to which your job output will be saved. </p>
pub fn set_output_data_config(
mut self,
input: std::option::Option<crate::model::OutputDataConfig>,
) -> Self {
self.output_data_config = input;
self
}
        /// <p>The Amazon Resource Name (ARN) of an AWS Identity and Access Management (IAM) role that grants Amazon Translate read access to your input data. For more information, see <code>identity-and-access-management</code>.</p>
pub fn data_access_role_arn(mut self, input: impl Into<std::string::String>) -> Self {
self.data_access_role_arn = Some(input.into());
self
}
        /// <p>The Amazon Resource Name (ARN) of an AWS Identity and Access Management (IAM) role that grants Amazon Translate read access to your input data. For more information, see <code>identity-and-access-management</code>.</p>
pub fn set_data_access_role_arn(
mut self,
input: std::option::Option<std::string::String>,
) -> Self {
self.data_access_role_arn = input;
self
}
/// <p>The language code of the input language. For a list of language codes, see <code>what-is-languages</code>.</p>
/// <p>Amazon Translate does not automatically detect a source language during batch translation jobs.</p>
pub fn source_language_code(mut self, input: impl Into<std::string::String>) -> Self {
self.source_language_code = Some(input.into());
self
}
/// <p>The language code of the input language. For a list of language codes, see <code>what-is-languages</code>.</p>
/// <p>Amazon Translate does not automatically detect a source language during batch translation jobs.</p>
pub fn set_source_language_code(
mut self,
input: std::option::Option<std::string::String>,
) -> Self {
self.source_language_code = input;
self
}
/// Appends an item to `target_language_codes`.
///
/// To override the contents of this collection use [`set_target_language_codes`](Self::set_target_language_codes).
///
/// <p>The language code of the output language.</p>
pub fn target_language_codes(mut self, input: impl Into<std::string::String>) -> Self {
let mut v = self.target_language_codes.unwrap_or_default();
v.push(input.into());
self.target_language_codes = Some(v);
self
}
/// <p>The language code of the output language.</p>
pub fn set_target_language_codes(
mut self,
input: std::option::Option<std::vec::Vec<std::string::String>>,
) -> Self {
self.target_language_codes = input;
self
}
/// Appends an item to `terminology_names`.
///
/// To override the contents of this collection use [`set_terminology_names`](Self::set_terminology_names).
///
        /// <p>The name of a custom terminology resource to add to the translation job. This resource lists example source terms and the desired translation for each term.</p>
/// <p>This parameter accepts only one custom terminology resource.</p>
/// <p>For a list of available custom terminology resources, use the <code>ListTerminologies</code> operation.</p>
/// <p>For more information, see <code>how-custom-terminology</code>.</p>
pub fn terminology_names(mut self, input: impl Into<std::string::String>) -> Self {
let mut v = self.terminology_names.unwrap_or_default();
v.push(input.into());
self.terminology_names = Some(v);
self
}
        /// <p>The name of a custom terminology resource to add to the translation job. This resource lists example source terms and the desired translation for each term.</p>
/// <p>This parameter accepts only one custom terminology resource.</p>
/// <p>For a list of available custom terminology resources, use the <code>ListTerminologies</code> operation.</p>
/// <p>For more information, see <code>how-custom-terminology</code>.</p>
pub fn set_terminology_names(
mut self,
input: std::option::Option<std::vec::Vec<std::string::String>>,
) -> Self {
self.terminology_names = input;
self
}
/// Appends an item to `parallel_data_names`.
///
/// To override the contents of this collection use [`set_parallel_data_names`](Self::set_parallel_data_names).
///
/// <p>The name of a parallel data resource to add to the translation job. This resource consists of examples that show how you want segments of text to be translated. When you add parallel data to a translation job, you create an <i>Active Custom Translation</i> job. </p>
/// <p>This parameter accepts only one parallel data resource.</p> <note>
/// <p>Active Custom Translation jobs are priced at a higher rate than other jobs that don't use parallel data. For more information, see <a href="http://aws.amazon.com/translate/pricing/">Amazon Translate pricing</a>.</p>
/// </note>
/// <p>For a list of available parallel data resources, use the <code>ListParallelData</code> operation.</p>
/// <p>For more information, see <code>customizing-translations-parallel-data</code>.</p>
pub fn parallel_data_names(mut self, input: impl Into<std::string::String>) -> Self {
let mut v = self.parallel_data_names.unwrap_or_default();
v.push(input.into());
self.parallel_data_names = Some(v);
self
}
/// <p>The name of a parallel data resource to add to the translation job. This resource consists of examples that show how you want segments of text to be translated. When you add parallel data to a translation job, you create an <i>Active Custom Translation</i> job. </p>
/// <p>This parameter accepts only one parallel data resource.</p> <note>
/// <p>Active Custom Translation jobs are priced at a higher rate than other jobs that don't use parallel data. For more information, see <a href="http://aws.amazon.com/translate/pricing/">Amazon Translate pricing</a>.</p>
/// </note>
/// <p>For a list of available parallel data resources, use the <code>ListParallelData</code> operation.</p>
/// <p>For more information, see <code>customizing-translations-parallel-data</code>.</p>
pub fn set_parallel_data_names(
mut self,
input: std::option::Option<std::vec::Vec<std::string::String>>,
) -> Self {
self.parallel_data_names = input;
self
}
/// <p>A unique identifier for the request. This token is auto-generated when using the Amazon Translate SDK.</p>
pub fn client_token(mut self, input: impl Into<std::string::String>) -> Self {
self.client_token = Some(input.into());
self
}
/// <p>A unique identifier for the request. This token is auto-generated when using the Amazon Translate SDK.</p>
pub fn set_client_token(mut self, input: std::option::Option<std::string::String>) -> Self {
self.client_token = input;
self
}
/// <p>Settings to configure your translation output, including the option to mask profane words and phrases.</p>
pub fn settings(mut self, input: crate::model::TranslationSettings) -> Self {
self.settings = Some(input);
self
}
/// <p>Settings to configure your translation output, including the option to mask profane words and phrases.</p>
pub fn set_settings(
mut self,
input: std::option::Option<crate::model::TranslationSettings>,
) -> Self {
self.settings = input;
self
}
/// Consumes the builder and constructs a [`StartTextTranslationJobInput`](crate::input::StartTextTranslationJobInput)
pub fn build(
self,
) -> std::result::Result<
crate::input::StartTextTranslationJobInput,
aws_smithy_http::operation::BuildError,
> {
Ok(crate::input::StartTextTranslationJobInput {
job_name: self.job_name,
input_data_config: self.input_data_config,
output_data_config: self.output_data_config,
data_access_role_arn: self.data_access_role_arn,
source_language_code: self.source_language_code,
target_language_codes: self.target_language_codes,
terminology_names: self.terminology_names,
parallel_data_names: self.parallel_data_names,
client_token: self.client_token,
settings: self.settings,
})
}
}
}
#[doc(hidden)]
pub type StartTextTranslationJobInputOperationOutputAlias =
crate::operation::StartTextTranslationJob;
#[doc(hidden)]
pub type StartTextTranslationJobInputOperationRetryAlias = aws_http::retry::AwsErrorRetryPolicy;
impl StartTextTranslationJobInput {
/// Consumes the builder and constructs an Operation<[`StartTextTranslationJob`](crate::operation::StartTextTranslationJob)>
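    ///
    /// Note: if `client_token` is unset, this method first fills it with an
    /// idempotency token generated from the client config, so the service can
    /// recognize retries of the same logical request.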
#[allow(unused_mut)]
#[allow(clippy::let_and_return)]
#[allow(clippy::needless_borrow)]
pub async fn make_operation(
mut self,
_config: &crate::config::Config,
) -> std::result::Result<
aws_smithy_http::operation::Operation<
crate::operation::StartTextTranslationJob,
aws_http::retry::AwsErrorRetryPolicy,
>,
aws_smithy_http::operation::BuildError,
> {
if self.client_token.is_none() {
self.client_token = Some(_config.make_token.make_idempotency_token());
}
let mut request = {
fn uri_base(
_input: &crate::input::StartTextTranslationJobInput,
output: &mut String,
) -> Result<(), aws_smithy_http::operation::BuildError> {
write!(output, "/").expect("formatting should succeed");
Ok(())
}
#[allow(clippy::unnecessary_wraps)]
fn update_http_builder(
input: &crate::input::StartTextTranslationJobInput,
builder: http::request::Builder,
) -> std::result::Result<http::request::Builder, aws_smithy_http::operation::BuildError>
{
let mut uri = String::new();
uri_base(input, &mut uri)?;
Ok(builder.method("POST").uri(uri))
}
let mut builder = update_http_builder(&self, http::request::Builder::new())?;
builder = aws_smithy_http::header::set_request_header_if_absent(
builder,
http::header::CONTENT_TYPE,
"application/x-amz-json-1.1",
);
builder = aws_smithy_http::header::set_request_header_if_absent(
builder,
http::header::HeaderName::from_static("x-amz-target"),
"AWSShineFrontendService_20170701.StartTextTranslationJob",
);
builder
};
let mut properties = aws_smithy_http::property_bag::SharedPropertyBag::new();
#[allow(clippy::useless_conversion)]
let body = aws_smithy_http::body::SdkBody::from(
crate::operation_ser::serialize_operation_crate_operation_start_text_translation_job(
&self,
)?,
);
if let Some(content_length) = body.content_length() {
request = aws_smithy_http::header::set_request_header_if_absent(
request,
http::header::CONTENT_LENGTH,
content_length,
);
}
let request = request.body(body).expect("should be valid request");
let mut request = aws_smithy_http::operation::Request::from_parts(request, properties);
let mut user_agent = aws_http::user_agent::AwsUserAgent::new_from_environment(
aws_types::os_shim_internal::Env::real(),
crate::API_METADATA.clone(),
);
if let Some(app_name) = _config.app_name() {
user_agent = user_agent.with_app_name(app_name.clone());
}
request.properties_mut().insert(user_agent);
let mut signing_config = aws_sig_auth::signer::OperationSigningConfig::default_config();
request.properties_mut().insert(signing_config);
request
.properties_mut()
.insert(aws_types::SigningService::from_static(
_config.signing_service(),
));
aws_endpoint::set_endpoint_resolver(
&mut request.properties_mut(),
_config.endpoint_resolver.clone(),
);
if let Some(region) = &_config.region {
request.properties_mut().insert(region.clone());
}
aws_http::auth::set_provider(
&mut request.properties_mut(),
_config.credentials_provider.clone(),
);
let op = aws_smithy_http::operation::Operation::new(
request,
crate::operation::StartTextTranslationJob::new(),
)
.with_metadata(aws_smithy_http::operation::Metadata::new(
"StartTextTranslationJob",
"translate",
));
let op = op.with_retry_policy(aws_http::retry::AwsErrorRetryPolicy::new());
Ok(op)
}
/// Creates a new builder-style object to manufacture [`StartTextTranslationJobInput`](crate::input::StartTextTranslationJobInput)
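    ///
    /// A minimal construction sketch. The role ARN and language codes are
    /// illustrative, the input/output data configs would be built with their
    /// own model builders (elided here), and this assumes the crate is
    /// consumed as `aws_sdk_translate`:
    /// ```no_run
    /// let input = aws_sdk_translate::input::StartTextTranslationJobInput::builder()
    ///     .job_name("my-batch-job")
    ///     .data_access_role_arn("arn:aws:iam::123456789012:role/TranslateRole")
    ///     .source_language_code("en")
    ///     .target_language_codes("fr") // appends one code; call again to add more
    ///     .build()
    ///     .expect("all fields are optional, so build cannot fail");
    /// ```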
pub fn builder() -> crate::input::start_text_translation_job_input::Builder {
crate::input::start_text_translation_job_input::Builder::default()
}
}
/// See [`StopTextTranslationJobInput`](crate::input::StopTextTranslationJobInput)
pub mod stop_text_translation_job_input {
/// A builder for [`StopTextTranslationJobInput`](crate::input::StopTextTranslationJobInput)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) job_id: std::option::Option<std::string::String>,
}
impl Builder {
/// <p>The job ID of the job to be stopped.</p>
pub fn job_id(mut self, input: impl Into<std::string::String>) -> Self {
self.job_id = Some(input.into());
self
}
/// <p>The job ID of the job to be stopped.</p>
pub fn set_job_id(mut self, input: std::option::Option<std::string::String>) -> Self {
self.job_id = input;
self
}
/// Consumes the builder and constructs a [`StopTextTranslationJobInput`](crate::input::StopTextTranslationJobInput)
pub fn build(
self,
) -> std::result::Result<
crate::input::StopTextTranslationJobInput,
aws_smithy_http::operation::BuildError,
> {
Ok(crate::input::StopTextTranslationJobInput {
job_id: self.job_id,
})
}
}
}
#[doc(hidden)]
pub type StopTextTranslationJobInputOperationOutputAlias = crate::operation::StopTextTranslationJob;
#[doc(hidden)]
pub type StopTextTranslationJobInputOperationRetryAlias = aws_http::retry::AwsErrorRetryPolicy;
impl StopTextTranslationJobInput {
/// Consumes the builder and constructs an Operation<[`StopTextTranslationJob`](crate::operation::StopTextTranslationJob)>
#[allow(unused_mut)]
#[allow(clippy::let_and_return)]
#[allow(clippy::needless_borrow)]
pub async fn make_operation(
&self,
_config: &crate::config::Config,
) -> std::result::Result<
aws_smithy_http::operation::Operation<
crate::operation::StopTextTranslationJob,
aws_http::retry::AwsErrorRetryPolicy,
>,
aws_smithy_http::operation::BuildError,
> {
let mut request = {
fn uri_base(
_input: &crate::input::StopTextTranslationJobInput,
output: &mut String,
) -> Result<(), aws_smithy_http::operation::BuildError> {
write!(output, "/").expect("formatting should succeed");
Ok(())
}
#[allow(clippy::unnecessary_wraps)]
fn update_http_builder(
input: &crate::input::StopTextTranslationJobInput,
builder: http::request::Builder,
) -> std::result::Result<http::request::Builder, aws_smithy_http::operation::BuildError>
{
let mut uri = String::new();
uri_base(input, &mut uri)?;
Ok(builder.method("POST").uri(uri))
}
let mut builder = update_http_builder(&self, http::request::Builder::new())?;
builder = aws_smithy_http::header::set_request_header_if_absent(
builder,
http::header::CONTENT_TYPE,
"application/x-amz-json-1.1",
);
builder = aws_smithy_http::header::set_request_header_if_absent(
builder,
http::header::HeaderName::from_static("x-amz-target"),
"AWSShineFrontendService_20170701.StopTextTranslationJob",
);
builder
};
let mut properties = aws_smithy_http::property_bag::SharedPropertyBag::new();
#[allow(clippy::useless_conversion)]
let body = aws_smithy_http::body::SdkBody::from(
crate::operation_ser::serialize_operation_crate_operation_stop_text_translation_job(
&self,
)?,
);
if let Some(content_length) = body.content_length() {
request = aws_smithy_http::header::set_request_header_if_absent(
request,
http::header::CONTENT_LENGTH,
content_length,
);
}
let request = request.body(body).expect("should be valid request");
let mut request = aws_smithy_http::operation::Request::from_parts(request, properties);
let mut user_agent = aws_http::user_agent::AwsUserAgent::new_from_environment(
aws_types::os_shim_internal::Env::real(),
crate::API_METADATA.clone(),
);
if let Some(app_name) = _config.app_name() {
user_agent = user_agent.with_app_name(app_name.clone());
}
request.properties_mut().insert(user_agent);
let mut signing_config = aws_sig_auth::signer::OperationSigningConfig::default_config();
request.properties_mut().insert(signing_config);
request
.properties_mut()
.insert(aws_types::SigningService::from_static(
_config.signing_service(),
));
aws_endpoint::set_endpoint_resolver(
&mut request.properties_mut(),
_config.endpoint_resolver.clone(),
);
if let Some(region) = &_config.region {
request.properties_mut().insert(region.clone());
}
aws_http::auth::set_provider(
&mut request.properties_mut(),
_config.credentials_provider.clone(),
);
let op = aws_smithy_http::operation::Operation::new(
request,
crate::operation::StopTextTranslationJob::new(),
)
.with_metadata(aws_smithy_http::operation::Metadata::new(
"StopTextTranslationJob",
"translate",
));
let op = op.with_retry_policy(aws_http::retry::AwsErrorRetryPolicy::new());
Ok(op)
}
/// Creates a new builder-style object to manufacture [`StopTextTranslationJobInput`](crate::input::StopTextTranslationJobInput)
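    ///
    /// A minimal construction sketch (the job ID is illustrative; this assumes
    /// the crate is consumed as `aws_sdk_translate`):
    /// ```no_run
    /// let input = aws_sdk_translate::input::StopTextTranslationJobInput::builder()
    ///     .job_id("1234567890abcdef")
    ///     .build()
    ///     .expect("all fields are optional, so build cannot fail");
    /// ```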
pub fn builder() -> crate::input::stop_text_translation_job_input::Builder {
crate::input::stop_text_translation_job_input::Builder::default()
}
}
/// See [`TranslateTextInput`](crate::input::TranslateTextInput)
pub mod translate_text_input {
/// A builder for [`TranslateTextInput`](crate::input::TranslateTextInput)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) text: std::option::Option<std::string::String>,
pub(crate) terminology_names: std::option::Option<std::vec::Vec<std::string::String>>,
pub(crate) source_language_code: std::option::Option<std::string::String>,
pub(crate) target_language_code: std::option::Option<std::string::String>,
pub(crate) settings: std::option::Option<crate::model::TranslationSettings>,
}
impl Builder {
/// <p>The text to translate. The text string can be a maximum of 5,000 bytes long. Depending on your character set, this may be fewer than 5,000 characters.</p>
pub fn text(mut self, input: impl Into<std::string::String>) -> Self {
self.text = Some(input.into());
self
}
/// <p>The text to translate. The text string can be a maximum of 5,000 bytes long. Depending on your character set, this may be fewer than 5,000 characters.</p>
pub fn set_text(mut self, input: std::option::Option<std::string::String>) -> Self {
self.text = input;
self
}
/// Appends an item to `terminology_names`.
///
/// To override the contents of this collection use [`set_terminology_names`](Self::set_terminology_names).
///
        /// <p>The name of the terminology list file to be used in the TranslateText request. You can use at most one terminology list in a <code>TranslateText</code> request. Terminology lists can contain a maximum of 256 terms.</p>
pub fn terminology_names(mut self, input: impl Into<std::string::String>) -> Self {
let mut v = self.terminology_names.unwrap_or_default();
v.push(input.into());
self.terminology_names = Some(v);
self
}
        /// <p>The name of the terminology list file to be used in the TranslateText request. You can use at most one terminology list in a <code>TranslateText</code> request. Terminology lists can contain a maximum of 256 terms.</p>
pub fn set_terminology_names(
mut self,
input: std::option::Option<std::vec::Vec<std::string::String>>,
) -> Self {
self.terminology_names = input;
self
}
/// <p>The language code for the language of the source text. The language must be a language supported by Amazon Translate. For a list of language codes, see <code>what-is-languages</code>.</p>
/// <p>To have Amazon Translate determine the source language of your text, you can specify <code>auto</code> in the <code>SourceLanguageCode</code> field. If you specify <code>auto</code>, Amazon Translate will call <a href="https://docs.aws.amazon.com/comprehend/latest/dg/comprehend-general.html">Amazon Comprehend</a> to determine the source language.</p>
pub fn source_language_code(mut self, input: impl Into<std::string::String>) -> Self {
self.source_language_code = Some(input.into());
self
}
/// <p>The language code for the language of the source text. The language must be a language supported by Amazon Translate. For a list of language codes, see <code>what-is-languages</code>.</p>
/// <p>To have Amazon Translate determine the source language of your text, you can specify <code>auto</code> in the <code>SourceLanguageCode</code> field. If you specify <code>auto</code>, Amazon Translate will call <a href="https://docs.aws.amazon.com/comprehend/latest/dg/comprehend-general.html">Amazon Comprehend</a> to determine the source language.</p>
pub fn set_source_language_code(
mut self,
input: std::option::Option<std::string::String>,
) -> Self {
self.source_language_code = input;
self
}
/// <p>The language code requested for the language of the target text. The language must be a language supported by Amazon Translate.</p>
pub fn target_language_code(mut self, input: impl Into<std::string::String>) -> Self {
self.target_language_code = Some(input.into());
self
}
/// <p>The language code requested for the language of the target text. The language must be a language supported by Amazon Translate.</p>
pub fn set_target_language_code(
mut self,
input: std::option::Option<std::string::String>,
) -> Self {
self.target_language_code = input;
self
}
/// <p>Settings to configure your translation output, including the option to mask profane words and phrases.</p>
pub fn settings(mut self, input: crate::model::TranslationSettings) -> Self {
self.settings = Some(input);
self
}
/// <p>Settings to configure your translation output, including the option to mask profane words and phrases.</p>
pub fn set_settings(
mut self,
input: std::option::Option<crate::model::TranslationSettings>,
) -> Self {
self.settings = input;
self
}
/// Consumes the builder and constructs a [`TranslateTextInput`](crate::input::TranslateTextInput)
pub fn build(
self,
) -> std::result::Result<
crate::input::TranslateTextInput,
aws_smithy_http::operation::BuildError,
> {
Ok(crate::input::TranslateTextInput {
text: self.text,
terminology_names: self.terminology_names,
source_language_code: self.source_language_code,
target_language_code: self.target_language_code,
settings: self.settings,
})
}
}
}
#[doc(hidden)]
pub type TranslateTextInputOperationOutputAlias = crate::operation::TranslateText;
#[doc(hidden)]
pub type TranslateTextInputOperationRetryAlias = aws_http::retry::AwsErrorRetryPolicy;
impl TranslateTextInput {
/// Consumes the builder and constructs an Operation<[`TranslateText`](crate::operation::TranslateText)>
#[allow(unused_mut)]
#[allow(clippy::let_and_return)]
#[allow(clippy::needless_borrow)]
pub async fn make_operation(
&self,
_config: &crate::config::Config,
) -> std::result::Result<
aws_smithy_http::operation::Operation<
crate::operation::TranslateText,
aws_http::retry::AwsErrorRetryPolicy,
>,
aws_smithy_http::operation::BuildError,
> {
let mut request = {
fn uri_base(
_input: &crate::input::TranslateTextInput,
output: &mut String,
) -> Result<(), aws_smithy_http::operation::BuildError> {
write!(output, "/").expect("formatting should succeed");
Ok(())
}
#[allow(clippy::unnecessary_wraps)]
fn update_http_builder(
input: &crate::input::TranslateTextInput,
builder: http::request::Builder,
) -> std::result::Result<http::request::Builder, aws_smithy_http::operation::BuildError>
{
let mut uri = String::new();
uri_base(input, &mut uri)?;
Ok(builder.method("POST").uri(uri))
}
let mut builder = update_http_builder(&self, http::request::Builder::new())?;
builder = aws_smithy_http::header::set_request_header_if_absent(
builder,
http::header::CONTENT_TYPE,
"application/x-amz-json-1.1",
);
builder = aws_smithy_http::header::set_request_header_if_absent(
builder,
http::header::HeaderName::from_static("x-amz-target"),
"AWSShineFrontendService_20170701.TranslateText",
);
builder
};
let mut properties = aws_smithy_http::property_bag::SharedPropertyBag::new();
#[allow(clippy::useless_conversion)]
let body = aws_smithy_http::body::SdkBody::from(
crate::operation_ser::serialize_operation_crate_operation_translate_text(&self)?,
);
if let Some(content_length) = body.content_length() {
request = aws_smithy_http::header::set_request_header_if_absent(
request,
http::header::CONTENT_LENGTH,
content_length,
);
}
let request = request.body(body).expect("should be valid request");
let mut request = aws_smithy_http::operation::Request::from_parts(request, properties);
let mut user_agent = aws_http::user_agent::AwsUserAgent::new_from_environment(
aws_types::os_shim_internal::Env::real(),
crate::API_METADATA.clone(),
);
if let Some(app_name) = _config.app_name() {
user_agent = user_agent.with_app_name(app_name.clone());
}
request.properties_mut().insert(user_agent);
let mut signing_config = aws_sig_auth::signer::OperationSigningConfig::default_config();
request.properties_mut().insert(signing_config);
request
.properties_mut()
.insert(aws_types::SigningService::from_static(
_config.signing_service(),
));
aws_endpoint::set_endpoint_resolver(
&mut request.properties_mut(),
_config.endpoint_resolver.clone(),
);
if let Some(region) = &_config.region {
request.properties_mut().insert(region.clone());
}
aws_http::auth::set_provider(
&mut request.properties_mut(),
_config.credentials_provider.clone(),
);
let op = aws_smithy_http::operation::Operation::new(
request,
crate::operation::TranslateText::new(),
)
.with_metadata(aws_smithy_http::operation::Metadata::new(
"TranslateText",
"translate",
));
let op = op.with_retry_policy(aws_http::retry::AwsErrorRetryPolicy::new());
Ok(op)
}
/// Creates a new builder-style object to manufacture [`TranslateTextInput`](crate::input::TranslateTextInput)
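    ///
    /// A minimal construction sketch (the text and language codes are
    /// illustrative; this assumes the crate is consumed as `aws_sdk_translate`):
    /// ```no_run
    /// let input = aws_sdk_translate::input::TranslateTextInput::builder()
    ///     .text("Hello, world")
    ///     .source_language_code("en") // or "auto" to let the service detect it
    ///     .target_language_code("es")
    ///     .build()
    ///     .expect("all fields are optional, so build cannot fail");
    /// ```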
pub fn builder() -> crate::input::translate_text_input::Builder {
crate::input::translate_text_input::Builder::default()
}
}
/// See [`UpdateParallelDataInput`](crate::input::UpdateParallelDataInput)
pub mod update_parallel_data_input {
/// A builder for [`UpdateParallelDataInput`](crate::input::UpdateParallelDataInput)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) name: std::option::Option<std::string::String>,
pub(crate) description: std::option::Option<std::string::String>,
pub(crate) parallel_data_config: std::option::Option<crate::model::ParallelDataConfig>,
pub(crate) client_token: std::option::Option<std::string::String>,
}
impl Builder {
/// <p>The name of the parallel data resource being updated.</p>
pub fn name(mut self, input: impl Into<std::string::String>) -> Self {
self.name = Some(input.into());
self
}
/// <p>The name of the parallel data resource being updated.</p>
pub fn set_name(mut self, input: std::option::Option<std::string::String>) -> Self {
self.name = input;
self
}
/// <p>A custom description for the parallel data resource in Amazon Translate.</p>
pub fn description(mut self, input: impl Into<std::string::String>) -> Self {
self.description = Some(input.into());
self
}
/// <p>A custom description for the parallel data resource in Amazon Translate.</p>
pub fn set_description(mut self, input: std::option::Option<std::string::String>) -> Self {
self.description = input;
self
}
/// <p>Specifies the format and S3 location of the parallel data input file.</p>
pub fn parallel_data_config(mut self, input: crate::model::ParallelDataConfig) -> Self {
self.parallel_data_config = Some(input);
self
}
/// <p>Specifies the format and S3 location of the parallel data input file.</p>
pub fn set_parallel_data_config(
mut self,
input: std::option::Option<crate::model::ParallelDataConfig>,
) -> Self {
self.parallel_data_config = input;
self
}
/// <p>A unique identifier for the request. This token is automatically generated when you use Amazon Translate through an AWS SDK.</p>
pub fn client_token(mut self, input: impl Into<std::string::String>) -> Self {
self.client_token = Some(input.into());
self
}
/// <p>A unique identifier for the request. This token is automatically generated when you use Amazon Translate through an AWS SDK.</p>
pub fn set_client_token(mut self, input: std::option::Option<std::string::String>) -> Self {
self.client_token = input;
self
}
        /// Consumes the builder and constructs an [`UpdateParallelDataInput`](crate::input::UpdateParallelDataInput)
pub fn build(
self,
) -> std::result::Result<
crate::input::UpdateParallelDataInput,
aws_smithy_http::operation::BuildError,
> {
Ok(crate::input::UpdateParallelDataInput {
name: self.name,
description: self.description,
parallel_data_config: self.parallel_data_config,
client_token: self.client_token,
})
}
}
}
#[doc(hidden)]
pub type UpdateParallelDataInputOperationOutputAlias = crate::operation::UpdateParallelData;
#[doc(hidden)]
pub type UpdateParallelDataInputOperationRetryAlias = aws_http::retry::AwsErrorRetryPolicy;
impl UpdateParallelDataInput {
/// Consumes the builder and constructs an Operation<[`UpdateParallelData`](crate::operation::UpdateParallelData)>
#[allow(unused_mut)]
#[allow(clippy::let_and_return)]
#[allow(clippy::needless_borrow)]
pub async fn make_operation(
mut self,
_config: &crate::config::Config,
) -> std::result::Result<
aws_smithy_http::operation::Operation<
crate::operation::UpdateParallelData,
aws_http::retry::AwsErrorRetryPolicy,
>,
aws_smithy_http::operation::BuildError,
> {
if self.client_token.is_none() {
self.client_token = Some(_config.make_token.make_idempotency_token());
}
let mut request = {
fn uri_base(
_input: &crate::input::UpdateParallelDataInput,
output: &mut String,
) -> Result<(), aws_smithy_http::operation::BuildError> {
write!(output, "/").expect("formatting should succeed");
Ok(())
}
#[allow(clippy::unnecessary_wraps)]
fn update_http_builder(
input: &crate::input::UpdateParallelDataInput,
builder: http::request::Builder,
) -> std::result::Result<http::request::Builder, aws_smithy_http::operation::BuildError>
{
let mut uri = String::new();
uri_base(input, &mut uri)?;
Ok(builder.method("POST").uri(uri))
}
let mut builder = update_http_builder(&self, http::request::Builder::new())?;
builder = aws_smithy_http::header::set_request_header_if_absent(
builder,
http::header::CONTENT_TYPE,
"application/x-amz-json-1.1",
);
builder = aws_smithy_http::header::set_request_header_if_absent(
builder,
http::header::HeaderName::from_static("x-amz-target"),
"AWSShineFrontendService_20170701.UpdateParallelData",
);
builder
};
let mut properties = aws_smithy_http::property_bag::SharedPropertyBag::new();
#[allow(clippy::useless_conversion)]
let body = aws_smithy_http::body::SdkBody::from(
crate::operation_ser::serialize_operation_crate_operation_update_parallel_data(&self)?,
);
if let Some(content_length) = body.content_length() {
request = aws_smithy_http::header::set_request_header_if_absent(
request,
http::header::CONTENT_LENGTH,
content_length,
);
}
let request = request.body(body).expect("should be valid request");
let mut request = aws_smithy_http::operation::Request::from_parts(request, properties);
let mut user_agent = aws_http::user_agent::AwsUserAgent::new_from_environment(
aws_types::os_shim_internal::Env::real(),
crate::API_METADATA.clone(),
);
if let Some(app_name) = _config.app_name() {
user_agent = user_agent.with_app_name(app_name.clone());
}
request.properties_mut().insert(user_agent);
let mut signing_config = aws_sig_auth::signer::OperationSigningConfig::default_config();
request.properties_mut().insert(signing_config);
request
.properties_mut()
.insert(aws_types::SigningService::from_static(
_config.signing_service(),
));
aws_endpoint::set_endpoint_resolver(
&mut request.properties_mut(),
_config.endpoint_resolver.clone(),
);
if let Some(region) = &_config.region {
request.properties_mut().insert(region.clone());
}
aws_http::auth::set_provider(
&mut request.properties_mut(),
_config.credentials_provider.clone(),
);
let op = aws_smithy_http::operation::Operation::new(
request,
crate::operation::UpdateParallelData::new(),
)
.with_metadata(aws_smithy_http::operation::Metadata::new(
"UpdateParallelData",
"translate",
));
let op = op.with_retry_policy(aws_http::retry::AwsErrorRetryPolicy::new());
Ok(op)
}
/// Creates a new builder-style object to manufacture [`UpdateParallelDataInput`](crate::input::UpdateParallelDataInput)
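    ///
    /// A minimal construction sketch. The resource name and description are
    /// illustrative, the parallel data config would come from its model
    /// builder (elided here), the client token is filled automatically if
    /// left unset, and this assumes the crate is consumed as `aws_sdk_translate`:
    /// ```no_run
    /// let input = aws_sdk_translate::input::UpdateParallelDataInput::builder()
    ///     .name("my-parallel-data")
    ///     .description("refreshed examples")
    ///     .build()
    ///     .expect("all fields are optional, so build cannot fail");
    /// ```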
pub fn builder() -> crate::input::update_parallel_data_input::Builder {
crate::input::update_parallel_data_input::Builder::default()
}
}
#[allow(missing_docs)] // documentation missing in model
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct UpdateParallelDataInput {
/// <p>The name of the parallel data resource being updated.</p>
pub name: std::option::Option<std::string::String>,
/// <p>A custom description for the parallel data resource in Amazon Translate.</p>
pub description: std::option::Option<std::string::String>,
/// <p>Specifies the format and S3 location of the parallel data input file.</p>
pub parallel_data_config: std::option::Option<crate::model::ParallelDataConfig>,
/// <p>A unique identifier for the request. This token is automatically generated when you use Amazon Translate through an AWS SDK.</p>
pub client_token: std::option::Option<std::string::String>,
}
impl UpdateParallelDataInput {
/// <p>The name of the parallel data resource being updated.</p>
pub fn name(&self) -> std::option::Option<&str> {
self.name.as_deref()
}
/// <p>A custom description for the parallel data resource in Amazon Translate.</p>
pub fn description(&self) -> std::option::Option<&str> {
self.description.as_deref()
}
/// <p>Specifies the format and S3 location of the parallel data input file.</p>
pub fn parallel_data_config(&self) -> std::option::Option<&crate::model::ParallelDataConfig> {
self.parallel_data_config.as_ref()
}
/// <p>A unique identifier for the request. This token is automatically generated when you use Amazon Translate through an AWS SDK.</p>
pub fn client_token(&self) -> std::option::Option<&str> {
self.client_token.as_deref()
}
}
impl std::fmt::Debug for UpdateParallelDataInput {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("UpdateParallelDataInput");
formatter.field("name", &self.name);
formatter.field("description", &self.description);
formatter.field("parallel_data_config", &self.parallel_data_config);
formatter.field("client_token", &self.client_token);
formatter.finish()
}
}
#[allow(missing_docs)] // documentation missing in model
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct TranslateTextInput {
/// <p>The text to translate. The text string can be a maximum of 5,000 bytes long. Depending on your character set, this may be fewer than 5,000 characters.</p>
pub text: std::option::Option<std::string::String>,
    /// <p>The name of the terminology list file to be used in the TranslateText request. You can use at most one terminology list in a <code>TranslateText</code> request. Terminology lists can contain a maximum of 256 terms.</p>
pub terminology_names: std::option::Option<std::vec::Vec<std::string::String>>,
/// <p>The language code for the language of the source text. The language must be a language supported by Amazon Translate. For a list of language codes, see <code>what-is-languages</code>.</p>
/// <p>To have Amazon Translate determine the source language of your text, you can specify <code>auto</code> in the <code>SourceLanguageCode</code> field. If you specify <code>auto</code>, Amazon Translate will call <a href="https://docs.aws.amazon.com/comprehend/latest/dg/comprehend-general.html">Amazon Comprehend</a> to determine the source language.</p>
pub source_language_code: std::option::Option<std::string::String>,
/// <p>The language code requested for the language of the target text. The language must be a language supported by Amazon Translate.</p>
pub target_language_code: std::option::Option<std::string::String>,
/// <p>Settings to configure your translation output, including the option to mask profane words and phrases.</p>
pub settings: std::option::Option<crate::model::TranslationSettings>,
}
impl TranslateTextInput {
/// <p>The text to translate. The text string can be a maximum of 5,000 bytes long. Depending on your character set, this may be fewer than 5,000 characters.</p>
pub fn text(&self) -> std::option::Option<&str> {
self.text.as_deref()
}
    /// <p>The name of the terminology list file to be used in the TranslateText request. You can use at most one terminology list in a <code>TranslateText</code> request. Terminology lists can contain a maximum of 256 terms.</p>
pub fn terminology_names(&self) -> std::option::Option<&[std::string::String]> {
self.terminology_names.as_deref()
}
/// <p>The language code for the language of the source text. The language must be a language supported by Amazon Translate. For a list of language codes, see <code>what-is-languages</code>.</p>
/// <p>To have Amazon Translate determine the source language of your text, you can specify <code>auto</code> in the <code>SourceLanguageCode</code> field. If you specify <code>auto</code>, Amazon Translate will call <a href="https://docs.aws.amazon.com/comprehend/latest/dg/comprehend-general.html">Amazon Comprehend</a> to determine the source language.</p>
pub fn source_language_code(&self) -> std::option::Option<&str> {
self.source_language_code.as_deref()
}
/// <p>The language code requested for the language of the target text. The language must be a language supported by Amazon Translate.</p>
pub fn target_language_code(&self) -> std::option::Option<&str> {
self.target_language_code.as_deref()
}
/// <p>Settings to configure your translation output, including the option to mask profane words and phrases.</p>
pub fn settings(&self) -> std::option::Option<&crate::model::TranslationSettings> {
self.settings.as_ref()
}
}
impl std::fmt::Debug for TranslateTextInput {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("TranslateTextInput");
formatter.field("text", &self.text);
formatter.field("terminology_names", &self.terminology_names);
formatter.field("source_language_code", &self.source_language_code);
formatter.field("target_language_code", &self.target_language_code);
formatter.field("settings", &self.settings);
formatter.finish()
}
}
#[allow(missing_docs)] // documentation missing in model
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct StopTextTranslationJobInput {
/// <p>The job ID of the job to be stopped.</p>
pub job_id: std::option::Option<std::string::String>,
}
impl StopTextTranslationJobInput {
/// <p>The job ID of the job to be stopped.</p>
pub fn job_id(&self) -> std::option::Option<&str> {
self.job_id.as_deref()
}
}
impl std::fmt::Debug for StopTextTranslationJobInput {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("StopTextTranslationJobInput");
formatter.field("job_id", &self.job_id);
formatter.finish()
}
}
#[allow(missing_docs)] // documentation missing in model
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct StartTextTranslationJobInput {
/// <p>The name of the batch translation job to be performed.</p>
pub job_name: std::option::Option<std::string::String>,
/// <p>Specifies the format and S3 location of the input documents for the translation job.</p>
pub input_data_config: std::option::Option<crate::model::InputDataConfig>,
/// <p>Specifies the S3 folder to which your job output will be saved. </p>
pub output_data_config: std::option::Option<crate::model::OutputDataConfig>,
    /// <p>The Amazon Resource Name (ARN) of an AWS Identity and Access Management (IAM) role that grants Amazon Translate read access to your input data. For more information, see <code>identity-and-access-management</code>.</p>
pub data_access_role_arn: std::option::Option<std::string::String>,
/// <p>The language code of the input language. For a list of language codes, see <code>what-is-languages</code>.</p>
/// <p>Amazon Translate does not automatically detect a source language during batch translation jobs.</p>
pub source_language_code: std::option::Option<std::string::String>,
/// <p>The language code of the output language.</p>
pub target_language_codes: std::option::Option<std::vec::Vec<std::string::String>>,
    /// <p>The name of a custom terminology resource to add to the translation job. This resource lists example source terms and the desired translation for each term.</p>
/// <p>This parameter accepts only one custom terminology resource.</p>
/// <p>For a list of available custom terminology resources, use the <code>ListTerminologies</code> operation.</p>
/// <p>For more information, see <code>how-custom-terminology</code>.</p>
pub terminology_names: std::option::Option<std::vec::Vec<std::string::String>>,
/// <p>The name of a parallel data resource to add to the translation job. This resource consists of examples that show how you want segments of text to be translated. When you add parallel data to a translation job, you create an <i>Active Custom Translation</i> job. </p>
/// <p>This parameter accepts only one parallel data resource.</p> <note>
/// <p>Active Custom Translation jobs are priced at a higher rate than other jobs that don't use parallel data. For more information, see <a href="http://aws.amazon.com/translate/pricing/">Amazon Translate pricing</a>.</p>
/// </note>
/// <p>For a list of available parallel data resources, use the <code>ListParallelData</code> operation.</p>
/// <p>For more information, see <code>customizing-translations-parallel-data</code>.</p>
pub parallel_data_names: std::option::Option<std::vec::Vec<std::string::String>>,
/// <p>A unique identifier for the request. This token is auto-generated when using the Amazon Translate SDK.</p>
pub client_token: std::option::Option<std::string::String>,
/// <p>Settings to configure your translation output, including the option to mask profane words and phrases.</p>
pub settings: std::option::Option<crate::model::TranslationSettings>,
}
impl StartTextTranslationJobInput {
/// <p>The name of the batch translation job to be performed.</p>
pub fn job_name(&self) -> std::option::Option<&str> {
self.job_name.as_deref()
}
/// <p>Specifies the format and S3 location of the input documents for the translation job.</p>
pub fn input_data_config(&self) -> std::option::Option<&crate::model::InputDataConfig> {
self.input_data_config.as_ref()
}
/// <p>Specifies the S3 folder to which your job output will be saved. </p>
pub fn output_data_config(&self) -> std::option::Option<&crate::model::OutputDataConfig> {
self.output_data_config.as_ref()
}
    /// <p>The Amazon Resource Name (ARN) of an AWS Identity and Access Management (IAM) role that grants Amazon Translate read access to your input data. For more information, see <code>identity-and-access-management</code>.</p>
pub fn data_access_role_arn(&self) -> std::option::Option<&str> {
self.data_access_role_arn.as_deref()
}
/// <p>The language code of the input language. For a list of language codes, see <code>what-is-languages</code>.</p>
/// <p>Amazon Translate does not automatically detect a source language during batch translation jobs.</p>
pub fn source_language_code(&self) -> std::option::Option<&str> {
self.source_language_code.as_deref()
}
/// <p>The language code of the output language.</p>
pub fn target_language_codes(&self) -> std::option::Option<&[std::string::String]> {
self.target_language_codes.as_deref()
}
    /// <p>The name of a custom terminology resource to add to the translation job. This resource lists example source terms and the desired translation for each term.</p>
/// <p>This parameter accepts only one custom terminology resource.</p>
/// <p>For a list of available custom terminology resources, use the <code>ListTerminologies</code> operation.</p>
/// <p>For more information, see <code>how-custom-terminology</code>.</p>
pub fn terminology_names(&self) -> std::option::Option<&[std::string::String]> {
self.terminology_names.as_deref()
}
/// <p>The name of a parallel data resource to add to the translation job. This resource consists of examples that show how you want segments of text to be translated. When you add parallel data to a translation job, you create an <i>Active Custom Translation</i> job. </p>
/// <p>This parameter accepts only one parallel data resource.</p> <note>
/// <p>Active Custom Translation jobs are priced at a higher rate than other jobs that don't use parallel data. For more information, see <a href="http://aws.amazon.com/translate/pricing/">Amazon Translate pricing</a>.</p>
/// </note>
/// <p>For a list of available parallel data resources, use the <code>ListParallelData</code> operation.</p>
/// <p>For more information, see <code>customizing-translations-parallel-data</code>.</p>
pub fn parallel_data_names(&self) -> std::option::Option<&[std::string::String]> {
self.parallel_data_names.as_deref()
}
/// <p>A unique identifier for the request. This token is auto-generated when using the Amazon Translate SDK.</p>
pub fn client_token(&self) -> std::option::Option<&str> {
self.client_token.as_deref()
}
/// <p>Settings to configure your translation output, including the option to mask profane words and phrases.</p>
pub fn settings(&self) -> std::option::Option<&crate::model::TranslationSettings> {
self.settings.as_ref()
}
}
impl std::fmt::Debug for StartTextTranslationJobInput {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("StartTextTranslationJobInput");
formatter.field("job_name", &self.job_name);
formatter.field("input_data_config", &self.input_data_config);
formatter.field("output_data_config", &self.output_data_config);
formatter.field("data_access_role_arn", &self.data_access_role_arn);
formatter.field("source_language_code", &self.source_language_code);
formatter.field("target_language_codes", &self.target_language_codes);
formatter.field("terminology_names", &self.terminology_names);
formatter.field("parallel_data_names", &self.parallel_data_names);
formatter.field("client_token", &self.client_token);
formatter.field("settings", &self.settings);
formatter.finish()
}
}
#[allow(missing_docs)] // documentation missing in model
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct ListTextTranslationJobsInput {
/// <p>The parameters that specify which batch translation jobs to retrieve. Filters include job name, job status, and submission time. You can only set one filter at a time.</p>
pub filter: std::option::Option<crate::model::TextTranslationJobFilter>,
/// <p>The token to request the next page of results.</p>
pub next_token: std::option::Option<std::string::String>,
/// <p>The maximum number of results to return in each page. The default value is 100.</p>
pub max_results: std::option::Option<i32>,
}
impl ListTextTranslationJobsInput {
/// <p>The parameters that specify which batch translation jobs to retrieve. Filters include job name, job status, and submission time. You can only set one filter at a time.</p>
pub fn filter(&self) -> std::option::Option<&crate::model::TextTranslationJobFilter> {
self.filter.as_ref()
}
/// <p>The token to request the next page of results.</p>
pub fn next_token(&self) -> std::option::Option<&str> {
self.next_token.as_deref()
}
/// <p>The maximum number of results to return in each page. The default value is 100.</p>
pub fn max_results(&self) -> std::option::Option<i32> {
self.max_results
}
}
impl std::fmt::Debug for ListTextTranslationJobsInput {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("ListTextTranslationJobsInput");
formatter.field("filter", &self.filter);
formatter.field("next_token", &self.next_token);
formatter.field("max_results", &self.max_results);
formatter.finish()
}
}
#[allow(missing_docs)] // documentation missing in model
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct ListTerminologiesInput {
/// <p>If the result of the request to ListTerminologies was truncated, include the NextToken to fetch the next group of custom terminologies. </p>
pub next_token: std::option::Option<std::string::String>,
/// <p>The maximum number of custom terminologies returned per list request.</p>
pub max_results: std::option::Option<i32>,
}
impl ListTerminologiesInput {
/// <p>If the result of the request to ListTerminologies was truncated, include the NextToken to fetch the next group of custom terminologies. </p>
pub fn next_token(&self) -> std::option::Option<&str> {
self.next_token.as_deref()
}
/// <p>The maximum number of custom terminologies returned per list request.</p>
pub fn max_results(&self) -> std::option::Option<i32> {
self.max_results
}
}
impl std::fmt::Debug for ListTerminologiesInput {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("ListTerminologiesInput");
formatter.field("next_token", &self.next_token);
formatter.field("max_results", &self.max_results);
formatter.finish()
}
}
#[allow(missing_docs)] // documentation missing in model
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct ListParallelDataInput {
/// <p>A string that specifies the next page of results to return in a paginated response.</p>
pub next_token: std::option::Option<std::string::String>,
/// <p>The maximum number of parallel data resources returned for each request.</p>
pub max_results: std::option::Option<i32>,
}
impl ListParallelDataInput {
/// <p>A string that specifies the next page of results to return in a paginated response.</p>
pub fn next_token(&self) -> std::option::Option<&str> {
self.next_token.as_deref()
}
/// <p>The maximum number of parallel data resources returned for each request.</p>
pub fn max_results(&self) -> std::option::Option<i32> {
self.max_results
}
}
impl std::fmt::Debug for ListParallelDataInput {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("ListParallelDataInput");
formatter.field("next_token", &self.next_token);
formatter.field("max_results", &self.max_results);
formatter.finish()
}
}
#[allow(missing_docs)] // documentation missing in model
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct ImportTerminologyInput {
/// <p>The name of the custom terminology being imported.</p>
pub name: std::option::Option<std::string::String>,
/// <p>The merge strategy of the custom terminology being imported. Currently, only the OVERWRITE merge strategy is supported. In this case, the imported terminology will overwrite an existing terminology of the same name.</p>
pub merge_strategy: std::option::Option<crate::model::MergeStrategy>,
/// <p>The description of the custom terminology being imported.</p>
pub description: std::option::Option<std::string::String>,
/// <p>The terminology data for the custom terminology being imported.</p>
pub terminology_data: std::option::Option<crate::model::TerminologyData>,
/// <p>The encryption key for the custom terminology being imported.</p>
pub encryption_key: std::option::Option<crate::model::EncryptionKey>,
}
impl ImportTerminologyInput {
/// <p>The name of the custom terminology being imported.</p>
pub fn name(&self) -> std::option::Option<&str> {
self.name.as_deref()
}
/// <p>The merge strategy of the custom terminology being imported. Currently, only the OVERWRITE merge strategy is supported. In this case, the imported terminology will overwrite an existing terminology of the same name.</p>
pub fn merge_strategy(&self) -> std::option::Option<&crate::model::MergeStrategy> {
self.merge_strategy.as_ref()
}
/// <p>The description of the custom terminology being imported.</p>
pub fn description(&self) -> std::option::Option<&str> {
self.description.as_deref()
}
/// <p>The terminology data for the custom terminology being imported.</p>
pub fn terminology_data(&self) -> std::option::Option<&crate::model::TerminologyData> {
self.terminology_data.as_ref()
}
/// <p>The encryption key for the custom terminology being imported.</p>
pub fn encryption_key(&self) -> std::option::Option<&crate::model::EncryptionKey> {
self.encryption_key.as_ref()
}
}
impl std::fmt::Debug for ImportTerminologyInput {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("ImportTerminologyInput");
formatter.field("name", &self.name);
formatter.field("merge_strategy", &self.merge_strategy);
formatter.field("description", &self.description);
formatter.field("terminology_data", &self.terminology_data);
formatter.field("encryption_key", &self.encryption_key);
formatter.finish()
}
}
#[allow(missing_docs)] // documentation missing in model
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct GetTerminologyInput {
/// <p>The name of the custom terminology being retrieved.</p>
pub name: std::option::Option<std::string::String>,
/// <p>The data format of the custom terminology being retrieved.</p>
/// <p>If you don't specify this parameter, Amazon Translate returns a file that has the same format as the file that was imported to create the terminology. </p>
/// <p>If you specify this parameter when you retrieve a multi-directional terminology resource, you must specify the same format as that of the input file that was imported to create it. Otherwise, Amazon Translate throws an error.</p>
pub terminology_data_format: std::option::Option<crate::model::TerminologyDataFormat>,
}
impl GetTerminologyInput {
/// <p>The name of the custom terminology being retrieved.</p>
pub fn name(&self) -> std::option::Option<&str> {
self.name.as_deref()
}
/// <p>The data format of the custom terminology being retrieved.</p>
/// <p>If you don't specify this parameter, Amazon Translate returns a file that has the same format as the file that was imported to create the terminology. </p>
/// <p>If you specify this parameter when you retrieve a multi-directional terminology resource, you must specify the same format as that of the input file that was imported to create it. Otherwise, Amazon Translate throws an error.</p>
pub fn terminology_data_format(
&self,
) -> std::option::Option<&crate::model::TerminologyDataFormat> {
self.terminology_data_format.as_ref()
}
}
impl std::fmt::Debug for GetTerminologyInput {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("GetTerminologyInput");
formatter.field("name", &self.name);
formatter.field("terminology_data_format", &self.terminology_data_format);
formatter.finish()
}
}
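// Added illustrative note (not part of the generated model): callers normally
// construct this input through the client's fluent builder rather than
// directly, e.g. something like
// `client.get_terminology().name("my-glossary").terminology_data_format(TerminologyDataFormat::Csv).send()`;
// the builder method names here are assumed from the field names above.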
#[allow(missing_docs)] // documentation missing in model
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct GetParallelDataInput {
/// <p>The name of the parallel data resource that is being retrieved.</p>
pub name: std::option::Option<std::string::String>,
}
impl GetParallelDataInput {
/// <p>The name of the parallel data resource that is being retrieved.</p>
pub fn name(&self) -> std::option::Option<&str> {
self.name.as_deref()
}
}
impl std::fmt::Debug for GetParallelDataInput {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("GetParallelDataInput");
formatter.field("name", &self.name);
formatter.finish()
}
}
#[allow(missing_docs)] // documentation missing in model
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct DescribeTextTranslationJobInput {
/// <p>The identifier that Amazon Translate generated for the job. The <code>StartTextTranslationJob</code> operation returns this identifier in its response.</p>
pub job_id: std::option::Option<std::string::String>,
}
impl DescribeTextTranslationJobInput {
/// <p>The identifier that Amazon Translate generated for the job. The <code>StartTextTranslationJob</code> operation returns this identifier in its response.</p>
pub fn job_id(&self) -> std::option::Option<&str> {
self.job_id.as_deref()
}
}
impl std::fmt::Debug for DescribeTextTranslationJobInput {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("DescribeTextTranslationJobInput");
formatter.field("job_id", &self.job_id);
formatter.finish()
}
}
#[allow(missing_docs)] // documentation missing in model
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct DeleteTerminologyInput {
/// <p>The name of the custom terminology being deleted. </p>
pub name: std::option::Option<std::string::String>,
}
impl DeleteTerminologyInput {
/// <p>The name of the custom terminology being deleted. </p>
pub fn name(&self) -> std::option::Option<&str> {
self.name.as_deref()
}
}
impl std::fmt::Debug for DeleteTerminologyInput {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("DeleteTerminologyInput");
formatter.field("name", &self.name);
formatter.finish()
}
}
#[allow(missing_docs)] // documentation missing in model
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct DeleteParallelDataInput {
/// <p>The name of the parallel data resource that is being deleted.</p>
pub name: std::option::Option<std::string::String>,
}
impl DeleteParallelDataInput {
/// <p>The name of the parallel data resource that is being deleted.</p>
pub fn name(&self) -> std::option::Option<&str> {
self.name.as_deref()
}
}
impl std::fmt::Debug for DeleteParallelDataInput {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("DeleteParallelDataInput");
formatter.field("name", &self.name);
formatter.finish()
}
}
#[allow(missing_docs)] // documentation missing in model
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct CreateParallelDataInput {
/// <p>A custom name for the parallel data resource in Amazon Translate. You must assign a name that is unique in the account and region.</p>
pub name: std::option::Option<std::string::String>,
/// <p>A custom description for the parallel data resource in Amazon Translate.</p>
pub description: std::option::Option<std::string::String>,
/// <p>Specifies the format and S3 location of the parallel data input file.</p>
pub parallel_data_config: std::option::Option<crate::model::ParallelDataConfig>,
/// <p>The encryption key used to encrypt this object.</p>
pub encryption_key: std::option::Option<crate::model::EncryptionKey>,
/// <p>A unique identifier for the request. This token is automatically generated when you use Amazon Translate through an AWS SDK.</p>
pub client_token: std::option::Option<std::string::String>,
}
impl CreateParallelDataInput {
/// <p>A custom name for the parallel data resource in Amazon Translate. You must assign a name that is unique in the account and region.</p>
pub fn name(&self) -> std::option::Option<&str> {
self.name.as_deref()
}
/// <p>A custom description for the parallel data resource in Amazon Translate.</p>
pub fn description(&self) -> std::option::Option<&str> {
self.description.as_deref()
}
/// <p>Specifies the format and S3 location of the parallel data input file.</p>
pub fn parallel_data_config(&self) -> std::option::Option<&crate::model::ParallelDataConfig> {
self.parallel_data_config.as_ref()
}
/// <p>The encryption key used to encrypt this object.</p>
pub fn encryption_key(&self) -> std::option::Option<&crate::model::EncryptionKey> {
self.encryption_key.as_ref()
}
/// <p>A unique identifier for the request. This token is automatically generated when you use Amazon Translate through an AWS SDK.</p>
pub fn client_token(&self) -> std::option::Option<&str> {
self.client_token.as_deref()
}
}
impl std::fmt::Debug for CreateParallelDataInput {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("CreateParallelDataInput");
formatter.field("name", &self.name);
formatter.field("description", &self.description);
formatter.field("parallel_data_config", &self.parallel_data_config);
formatter.field("encryption_key", &self.encryption_key);
formatter.field("client_token", &self.client_token);
formatter.finish()
}
} | |
identity.js | var syrup = require('dmc-syrup')
var devutil = require('../../../../util/devutil')
var logger = require('../../../../util/logger')
module.exports = syrup.serial()
.dependency(require('../../support/properties'))
.dependency(require('./display'))
.dependency(require('./phone'))
.define(function(options, properties, display, phone) {
var log = logger.createLogger('device:plugins:identity')
function solve() {
log.info('Solving identity')
var identity = devutil.makeIdentity(options.serial, properties) | identity.phone = phone
return identity
}
return solve()
}) | identity.display = display.properties |
parallax.directive.ts | import { Content } from 'ionic-angular';
import { Directive, ElementRef, Input, Renderer2, SimpleChanges, AfterViewInit } from '@angular/core';
@Directive({
selector: '[scrollHide]'
})
export class | implements AfterViewInit {
@Input('scrollHide') config: ScrollHideConfig;
@Input('scrollContent') scrollContent: Content;
contentHeight: number;
scrollHeight: number;
lastScrollPosition: number;
lastValue: number = 0;
lastValueButton: number = 105;
button: HTMLElement;
constructor(private element: ElementRef, private renderer: Renderer2) {
}
ngAfterViewInit() {
this.button = document.getElementsByClassName('ion-md-leerQr')[0] as HTMLElement;
/*this.button.style.transformOrigin = 'bottom center';
this.button.style.paddingBottom = '5px';
console.log(this.button);*/
}
ngOnChanges(changes: SimpleChanges) {
if (this.scrollContent && this.config) {
this.scrollContent.ionScrollStart.subscribe((ev) => {
this.contentHeight = this.scrollContent.getScrollElement().offsetHeight;
this.scrollHeight = this.scrollContent.getScrollElement().scrollHeight;
if (this.config.maxValue === undefined) {
this.config.maxValue = this.element.nativeElement.offsetHeight;
}
this.lastScrollPosition = ev.scrollTop;
});
this.scrollContent.ionScroll.subscribe((ev) => this.adjustElementOnScroll(ev));
this.scrollContent.ionScrollEnd.subscribe((ev) => this.adjustElementOnScroll(ev));
}
}
private adjustElementOnScroll(ev) {
if (ev) {
ev.domWrite(() => {
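// Added note: the damped scroll delta below (divided by 1.75) drives two
// clamped values per frame: the header offset (0..maxValue, applied as a
// negative CSS offset) and the QR button width (105px shrinking to 64px);
// scrolling back up reverses both.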
let scrollTop: number = ev.scrollTop > 0 ? ev.scrollTop : 0;
let scrolldiff: number = (scrollTop - this.lastScrollPosition) / 1.75;
this.lastScrollPosition = scrollTop;
let newValue = this.lastValue + scrolldiff;
let newValueButton = this.lastValueButton - scrolldiff;
newValue = Math.max(0, Math.min(newValue, this.config.maxValue));
newValueButton = Math.max(64, Math.min(newValueButton, 105));
if (newValue === this.config.maxValue) {
this.renderer.addClass(this.element.nativeElement.children[0].children[0], 'compact');
} else {
this.renderer.removeClass(this.element.nativeElement.children[0].children[0], 'compact');
}
this.renderer.setStyle(this.element.nativeElement, this.config.cssProperty, `-${newValue}px`);
this.button.style.width = `${newValueButton}px`;
this.lastValue = newValue;
this.lastValueButton = newValueButton;
});
}
}
}
export interface ScrollHideConfig {
cssProperty: string;
maxValue: number;
} | ScrollHideDirective |
discovery_test.go | package plugins
import (
"io/ioutil"
"os"
"path/filepath"
"testing"
)
func Setup(t *testing.T) (string, func()) {
tmpdir, err := ioutil.TempDir("", "docker-test")
if err != nil {
t.Fatal(err)
}
backup := socketsPath
socketsPath = tmpdir
specsPaths = []string{tmpdir}
return tmpdir, func() {
socketsPath = backup
os.RemoveAll(tmpdir)
}
}
func | (t *testing.T) {
tmpdir, unregister := Setup(t)
defer unregister()
cases := []struct {
path string
name string
addr string
fail bool
}{
// TODO Windows: Factor out the unix:// variants.
{filepath.Join(tmpdir, "echo.spec"), "echo", "unix://var/lib/docker/plugins/echo.sock", false},
{filepath.Join(tmpdir, "echo", "echo.spec"), "echo", "unix://var/lib/docker/plugins/echo.sock", false},
{filepath.Join(tmpdir, "foo.spec"), "foo", "tcp://localhost:8080", false},
{filepath.Join(tmpdir, "foo", "foo.spec"), "foo", "tcp://localhost:8080", false},
{filepath.Join(tmpdir, "bar.spec"), "bar", "localhost:8080", true}, // unknown transport
}
for _, c := range cases {
if err := os.MkdirAll(filepath.Dir(c.path), 0755); err != nil {
t.Fatal(err)
}
if err := ioutil.WriteFile(c.path, []byte(c.addr), 0644); err != nil {
t.Fatal(err)
}
r := newLocalRegistry()
p, err := r.Plugin(c.name)
if c.fail && err == nil {
continue
}
if err != nil {
t.Fatal(err)
}
if p.name != c.name {
t.Fatalf("Expected plugin `%s`, got %s\n", c.name, p.Name)
}
if p.Addr != c.addr {
t.Fatalf("Expected plugin addr `%s`, got %s\n", c.addr, p.Addr)
}
if p.TLSConfig.InsecureSkipVerify != true {
t.Fatalf("Expected TLS verification to be skipped")
}
}
}
func TestFileJSONSpecPlugin(t *testing.T) {
tmpdir, unregister := Setup(t)
defer unregister()
p := filepath.Join(tmpdir, "example.json")
spec := `{
"Name": "plugin-example",
"Addr": "https://example.com/docker/plugin",
"TLSConfig": {
"CAFile": "/usr/shared/docker/certs/example-ca.pem",
"CertFile": "/usr/shared/docker/certs/example-cert.pem",
"KeyFile": "/usr/shared/docker/certs/example-key.pem"
}
}`
if err := ioutil.WriteFile(p, []byte(spec), 0644); err != nil {
t.Fatal(err)
}
r := newLocalRegistry()
plugin, err := r.Plugin("example")
if err != nil {
t.Fatal(err)
}
if plugin.name != "example" {
t.Fatalf("Expected plugin `plugin-example`, got %s\n", plugin.Name)
}
if plugin.Addr != "https://example.com/docker/plugin" {
t.Fatalf("Expected plugin addr `https://example.com/docker/plugin`, got %s\n", plugin.Addr)
}
if plugin.TLSConfig.CAFile != "/usr/shared/docker/certs/example-ca.pem" {
t.Fatalf("Expected plugin CA `/usr/shared/docker/certs/example-ca.pem`, got %s\n", plugin.TLSConfig.CAFile)
}
if plugin.TLSConfig.CertFile != "/usr/shared/docker/certs/example-cert.pem" {
t.Fatalf("Expected plugin Certificate `/usr/shared/docker/certs/example-cert.pem`, got %s\n", plugin.TLSConfig.CertFile)
}
if plugin.TLSConfig.KeyFile != "/usr/shared/docker/certs/example-key.pem" {
t.Fatalf("Expected plugin Key `/usr/shared/docker/certs/example-key.pem`, got %s\n", plugin.TLSConfig.KeyFile)
}
}
| TestFileSpecPlugin |
index.js | 'use strict';
const path = require('path');
const inquirer = require('inquirer');
const glob = require('glob');
const resolve = require('../resolve');
const getCwd = require('../getCwd');
const renderClear = require('../renderClear');
/**
* @param {Object} results - the results object
* @param {Object} di - dependency injection
* @returns {void}
**/
function fetchInitReadmes(results, { _renderClear, _getCwd, _path, _glob, _inquirer }) {
const cwd = _getCwd();
const readmeFiles = _glob
.sync(_path.join(cwd, '**', '*.md'), {
ignore: [_path.join(_getCwd(), 'node_modules', '**')],
})
.map(filePath => filePath.replace(cwd, '').replace(/^\//, ' '));
_renderClear();
return new Promise(pResolve => {
_inquirer
.prompt([
{
type: 'checkbox',
name: 'selectedReadmeFiles',
message: 'Select the readme files you like to use',
choices: readmeFiles.map(value => ({ value, checked: true })),
},
])
.then(({ selectedReadmeFiles }) => {
pResolve({
selectedReadmeFiles: selectedReadmeFiles.map(filePath => ({ path: filePath.replace(/^ /, '/') })),
});
});
});
}
module.exports = resolve(fetchInitReadmes, { renderClear, getCwd, path, glob, inquirer }); | module.exports.fetchInitReadmes = fetchInitReadmes; |
|
mul.py | """string multiplication"""
def | ():
a = 'hi'
b = a * 2
TestError( b == 'hihi' )
| main |
slice_mut.rs | /*!
Contains the ffi-safe equivalent of `&'a mut [T]`.
*/
use std::{
borrow::{Borrow,BorrowMut},
io::{self, Write},
marker::PhantomData,
mem,
ops::{Deref, DerefMut, Index, IndexMut},
};
use serde::{Serialize, Serializer};
#[allow(unused_imports)]
use core_extensions::prelude::*;
use crate::std_types::{RSlice, RVec};
mod privacy {
use super::*;
/**
Ffi-safe equivalent of `&'a mut [T]`
As of the writing of this documentation, the ABI stability of `&mut [T]` is
not yet guaranteed.
# Lifetime problems
Because `RSliceMut` dereferences into a mutable slice, you can call slice methods on it.
If you call a slice method that returns a borrow into the slice,
it will have the lifetime of the `let slice: RSliceMut<'a,T>` variable instead of the `'a`
lifetime that it's parameterized over.
To get a slice with the same lifetime as an `RSliceMut`,
one must use one of the `RSliceMut::{into_slice,into_mut_slice}` methods.
Example of what would not work:
```compile_fail
use abi_stable::std_types::RSliceMut;
fn into_slice<'a,T>(slic:RSliceMut<'a,T>)->&'a [T] {
&*slic
}
fn into_mut_slice<'a,T>(slic:RSliceMut<'a,T>)->&'a mut [T] {
&mut *slic
}
```
Example of what would work:
```
use abi_stable::std_types::RSliceMut;
fn into_slice<'a,T>(slic:RSliceMut<'a,T>)->&'a [T] {
slic.into_slice()
}
fn into_mut_slice<'a,T>(slic:RSliceMut<'a,T>)->&'a mut [T] {
slic.into_mut_slice()
}
```
# Example
Defining an extern fn that returns a mutable reference to
the first element that compares equal to a parameter.
```
use abi_stable::{
std_types::RSliceMut,
sabi_extern_fn,
};
#[sabi_extern_fn]
pub fn find_first_mut<'a,T>(slice_:RSliceMut<'a,T>,element:&T)->Option<&'a mut T>
where
T:std::cmp::PartialEq
{
slice_.iter()
.position(|x| x==element )
.map(|i| &mut slice_.into_mut_slice()[i] )
}
```
*/
#[repr(C)]
#[derive(StableAbi)]
#[sabi(bound = "T:'a")]
pub struct | <'a, T> {
data: *mut T,
length: usize,
_marker: PhantomData<MutWorkaround<'a,T>>,
}
/// Used as a workaround to make `from_raw_parts_mut` a const fn.
#[repr(C)]
#[derive(StableAbi)]
#[sabi(bound = "T:'a")]
struct MutWorkaround<'a,T>(&'a mut T);
impl_from_rust_repr! {
impl['a, T] From<&'a mut [T]> for RSliceMut<'a, T> {
fn(this){
RSliceMut {
data: this.as_mut_ptr(),
length: this.len(),
_marker: Default::default(),
}
}
}
}
impl<'a, T> RSliceMut<'a, T> {
#[inline(always)]
pub(super) const fn data(&self) -> *mut T {
self.data
}
/// Gets a raw pointer to the start of the slice.
pub const fn as_ptr(&self) -> *const T{
self.data
}
/// Gets a mutable raw pointer to the start of the slice.
pub fn as_mut_ptr(&mut self) -> *mut T{
self.data
}
/// Gets a mutable raw pointer to the start of the slice.
pub const fn into_mut_ptr(self) -> *mut T{
self.data
}
/// The length (in elements) of this slice.
///
/// # Example
///
/// ```
/// use abi_stable::std_types::RSliceMut;
///
/// assert_eq!(RSliceMut::<u8>::from_mut_slice(&mut[]).len(), 0);
/// assert_eq!(RSliceMut::from_mut_slice(&mut[0]).len(), 1);
/// assert_eq!(RSliceMut::from_mut_slice(&mut[0,1]).len(), 2);
///
///
/// ```
#[inline(always)]
pub const fn len(&self) -> usize {
self.length
}
/// Whether this slice is empty.
///
/// # Example
///
/// ```
/// use abi_stable::std_types::RSliceMut;
///
/// assert_eq!(RSliceMut::<u8>::from_mut_slice(&mut []).is_empty(), true);
/// assert_eq!(RSliceMut::from_mut_slice(&mut [0]).is_empty(), false);
/// assert_eq!(RSliceMut::from_mut_slice(&mut [0,1]).is_empty(), false);
///
/// ```
#[inline]
pub const fn is_empty(&self) -> bool {
self.length==0
}
/// Constructs an `RSliceMut<'a,T>` from a pointer to the first element,
/// and a length.
///
/// # Safety
///
/// Callers must ensure that:
///
/// - `ptr_` points to valid memory,
///
/// - `ptr_ .. ptr+len` range is accessible memory.
///
/// - `ptr_` is aligned to `T`.
///
/// - the data `ptr_` points to must be valid for the lifetime of this `RSliceMut<'a,T>`
///
/// # Examples
///
/// This function unsafely converts a `&mut [T]` to an `RSliceMut<T>`,
/// equivalent to doing `RSliceMut::from_mut_slice`.
///
/// ```
/// use abi_stable::std_types::RSliceMut;
///
/// fn convert<T>(slice_:&mut [T])->RSliceMut<'_,T>{
/// let len=slice_.len();
/// unsafe{
/// RSliceMut::from_raw_parts_mut( slice_.as_mut_ptr(), len )
/// }
/// }
///
/// ```
pub const unsafe fn from_raw_parts_mut(ptr_: *mut T, len: usize) -> Self {
Self {
data: ptr_,
length: len,
// WHAT!?
// error[E0723]: mutable references in const fn are unstable (see issue #57563)
_marker: PhantomData,
}
}
}
}
pub use self::privacy::RSliceMut;
impl<'a, T> RSliceMut<'a, T> {
// pub const fn empty() -> Self {
// Self::EMPTY
// }
/// Converts a mutable reference to `T` to a single element `RSliceMut<'a,T>`.
///
/// Note: this function does not copy anything.
///
/// # Example
///
/// ```
/// use abi_stable::std_types::RSliceMut;
///
/// assert_eq!(RSliceMut::from_mut(&mut 0), RSliceMut::from_mut_slice(&mut [0]) );
/// assert_eq!(RSliceMut::from_mut(&mut 1), RSliceMut::from_mut_slice(&mut [1]) );
/// assert_eq!(RSliceMut::from_mut(&mut 2), RSliceMut::from_mut_slice(&mut [2]) );
///
///
/// ```
pub fn from_mut(ref_:&'a mut T)->Self{
unsafe{
Self::from_raw_parts_mut(ref_,1)
}
}
/// Converts a `&'a mut [T]` to a `RSliceMut<'a,T>`.
///
/// # Example
///
/// ```
/// use abi_stable::std_types::RSliceMut;
///
/// let empty:&mut [u8]=&mut [];
///
/// assert_eq!(RSliceMut::<u8>::from_mut_slice(&mut[]).as_mut_slice(), empty);
/// assert_eq!(RSliceMut::from_mut_slice(&mut[0]).as_mut_slice() , &mut [0][..]);
/// assert_eq!(RSliceMut::from_mut_slice(&mut[0,1]).as_mut_slice() , &mut [0,1][..]);
///
/// ```
#[inline]
pub fn from_mut_slice(slic:&'a mut [T])->Self{
slic.into()
}
/// Creates an `RSlice<'b,T>` with access to the elements in the given range.
///
/// This is an inherent method instead of an implementation of the
/// `std::ops::Index` trait because it does not return a reference.
///
/// # Example
///
/// ```
/// use abi_stable::std_types::{RSliceMut,RSlice};
///
/// let slic=&mut[0,1,2,3];
/// let slic=RSliceMut::from_mut_slice(slic);
///
/// assert_eq!(slic.slice(..),RSlice::from_slice(&[0,1,2,3]));
/// assert_eq!(slic.slice(..2),RSlice::from_slice(&[0,1]));
/// assert_eq!(slic.slice(2..),RSlice::from_slice(&[2,3]));
/// assert_eq!(slic.slice(1..3),RSlice::from_slice(&[1,2]));
///
/// ```
#[allow(clippy::needless_lifetimes)]
pub fn slice<'b, I>(&'b self, i: I) -> RSlice<'b, T>
where
[T]: Index<I, Output = [T]>,
{
self.as_slice().index(i).into()
}
/// Creates an `RSliceMut<'a,T>` with access to the elements in the given range.
///
/// This is an inherent method instead of an implementation of the
/// `std::ops::IndexMut` trait because it does not return a reference.
///
/// # Example
///
/// ```
/// use abi_stable::std_types::RSliceMut;
///
/// let slic=&mut[0,1,2,3];
/// let mut slic=RSliceMut::from_mut_slice(slic);
///
/// assert_eq!(slic.slice_mut(..),RSliceMut::from_mut_slice(&mut[0,1,2,3]));
/// assert_eq!(slic.slice_mut(..2),RSliceMut::from_mut_slice(&mut[0,1]));
/// assert_eq!(slic.slice_mut(2..),RSliceMut::from_mut_slice(&mut[2,3]));
/// assert_eq!(slic.slice_mut(1..3),RSliceMut::from_mut_slice(&mut[1,2]));
///
/// ```
#[allow(clippy::needless_lifetimes)]
pub fn slice_mut<'b, I>(&'b mut self, i: I) -> RSliceMut<'b, T>
where
[T]: IndexMut<I, Output = [T]>,
{
self.as_mut_slice().index_mut(i).into()
}
/// Creates a new `RVec<T>` and clones all the elements of this slice into it.
///
/// # Example
///
/// ```
/// use abi_stable::std_types::{RSliceMut,RVec};
///
/// let slic=&mut[0,1,2,3];
/// let slic=RSliceMut::from_mut_slice(slic);
///
/// assert_eq!(slic.slice(..).to_rvec(),RVec::from_slice(&[0,1,2,3]));
/// assert_eq!(slic.slice(..2).to_rvec(),RVec::from_slice(&[0,1]));
/// assert_eq!(slic.slice(2..).to_rvec(),RVec::from_slice(&[2,3]));
/// assert_eq!(slic.slice(1..3).to_rvec(),RVec::from_slice(&[1,2]));
///
/// ```
pub fn to_rvec(&self) -> RVec<T>
where
T: Clone,
{
self.to_vec().into()
}
unsafe fn as_slice_unbounded_lifetime(&self) -> &'a [T] {
::std::slice::from_raw_parts(self.data(), self.len())
}
unsafe fn as_mut_slice_unbounded_lifetime(&mut self) -> &'a mut [T] {
::std::slice::from_raw_parts_mut(self.data(), self.len())
}
/// Creates an `&'_ [T]` with access to all the elements of this slice.
///
/// # Example
///
/// ```
/// use abi_stable::std_types::RSliceMut;
///
/// assert_eq!(RSliceMut::from_mut_slice(&mut[0,1,2,3]).as_slice(), &[0,1,2,3]);
///
/// ```
pub fn as_slice(&self) -> &[T] {
unsafe { self.as_slice_unbounded_lifetime() }
}
/// Creates an `&'a [T]` with access to all the elements of this slice.
///
/// This is different to `as_slice` in that the returned lifetime of
/// this function is larger.
///
/// # Example
///
/// ```
/// use abi_stable::std_types::RSliceMut;
///
/// assert_eq!(RSliceMut::from_mut_slice(&mut[0,1,2,3]).into_slice(), &[0,1,2,3]);
///
/// ```
pub fn into_slice(self) -> &'a [T] {
unsafe { self.as_slice_unbounded_lifetime() }
}
/// Creates an `RSlice<'_, T>` with access to all the elements of this slice.
///
/// # Example
///
/// ```
/// use abi_stable::std_types::{RSliceMut,RSlice};
///
/// assert_eq!(
/// RSliceMut::from_mut_slice(&mut[0,1,2,3]).as_rslice(),
/// RSlice::from_slice(&[0,1,2,3]),
/// );
///
/// ```
pub fn as_rslice(&self) -> RSlice<'_, T> {
self.as_slice().into()
}
/// Creates an `RSlice<'a, T>` with access to all the elements of this slice.
///
/// This is different to `as_rslice` in that the returned lifetime of
/// this function is larger.
///
/// # Example
///
/// ```
/// use abi_stable::std_types::{RSliceMut,RSlice};
///
/// assert_eq!(
/// RSliceMut::from_mut_slice(&mut[0,1,2,3]).into_rslice(),
/// RSlice::from_slice(&[0,1,2,3]),
/// );
///
/// ```
pub fn into_rslice(self) -> RSlice<'a, T> {
self.into_slice().into()
}
/// Creates a `&'_ mut [T]` with access to all the elements of this slice.
///
/// # Example
///
/// ```
/// use abi_stable::std_types::RSliceMut;
///
/// assert_eq!(RSliceMut::from_mut_slice(&mut[0,1,2,3]).as_mut_slice(), &mut [0,1,2,3]);
///
/// ```
pub fn as_mut_slice(&mut self) -> &mut [T] {
unsafe { self.as_mut_slice_unbounded_lifetime() }
}
/// Creates a `&'a mut [T]` with access to all the elements of this slice.
///
/// This is different to `as_mut_slice` in that the returned lifetime of
/// this function is larger.
///
/// # Example
///
/// ```
/// use abi_stable::std_types::RSliceMut;
///
/// assert_eq!(RSliceMut::from_mut_slice(&mut[0,1,2,3]).into_mut_slice(), &mut [0,1,2,3]);
///
/// ```
pub fn into_mut_slice(mut self) -> &'a mut [T] {
unsafe { self.as_mut_slice_unbounded_lifetime() }
}
}
unsafe impl<'a, T> Send for RSliceMut<'a, T> where &'a mut [T]: Send {}
unsafe impl<'a, T> Sync for RSliceMut<'a, T> where &'a mut [T]: Sync {}
impl<'a, T> Default for RSliceMut<'a, T> {
fn default() -> Self {
(&mut [][..]).into()
}
}
impl<'a, T> IntoIterator for RSliceMut<'a, T> {
type Item = &'a mut T;
type IntoIter = ::std::slice::IterMut<'a, T>;
fn into_iter(self) -> ::std::slice::IterMut<'a, T> {
self.into_mut_slice().iter_mut()
}
}
impl<'a, T> Deref for RSliceMut<'a, T> {
type Target = [T];
fn deref(&self) -> &Self::Target {
self.as_slice()
}
}
impl<'a, T> DerefMut for RSliceMut<'a, T> {
fn deref_mut(&mut self) -> &mut Self::Target {
self.as_mut_slice()
}
}
////////////////////////////
impl_into_rust_repr! {
impl['a, T] Into<&'a mut [T]> for RSliceMut<'a, T> {
fn(this){
this.into_mut_slice()
}
}
}
impl<'a, T> Into<&'a [T]> for RSliceMut<'a, T> {
fn into(self) -> &'a [T] {
self.into_slice()
}
}
////////////////////
impl<'a,T:'a> Borrow<[T]> for RSliceMut<'a,T>{
fn borrow(&self)->&[T]{
self
}
}
impl<'a,T:'a> BorrowMut<[T]> for RSliceMut<'a,T>{
fn borrow_mut(&mut self)->&mut [T]{
self
}
}
impl<'a,T:'a> AsRef<[T]> for RSliceMut<'a,T>{
fn as_ref(&self)->&[T]{
self
}
}
impl<'a,T:'a> AsMut<[T]> for RSliceMut<'a,T>{
fn as_mut(&mut self)->&mut [T]{
self
}
}
////////////////////////////
impl<'a, T> Serialize for RSliceMut<'a, T>
where
T: Serialize,
{
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
self.as_slice().serialize(serializer)
}
}
///////////////////////////////////////////////////////////////////////////////
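// Added note: like the std `impl Write for &mut [u8]`, writing advances the
// slice past the written bytes. The slice is temporarily taken out of `self`
// (via `mem::take`, using the `Default` impl above), written to, and put back.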
impl<'a> Write for RSliceMut<'a, u8> {
#[inline]
fn write(&mut self, data: &[u8]) -> io::Result<usize> {
let mut this = mem::take(self).into_mut_slice();
let ret = this.write(data);
*self = this.into();
ret
}
#[inline]
fn write_all(&mut self, data: &[u8]) -> io::Result<()> {
let mut this = mem::take(self).into_mut_slice();
let ret = this.write_all(data);
*self = this.into();
ret
}
#[inline]
fn flush(&mut self) -> io::Result<()> {
Ok(())
}
}
///////////////////////////////////////////////////////////////////////////////
#[allow(dead_code)]
type SliceMut<'a, T> = &'a mut [T];
shared_impls! {
mod=slice_impls
new_type=RSliceMut['a][T],
original_type=SliceMut,
}
////////////////////////////////////////////////////////////////////////////////
//#[cfg(test)]
#[cfg(all(test,not(feature="only_new_tests")))]
mod test {
use super::*;
#[test]
fn from_to_slice() {
let a = b"what the hell".to_vec();
let mut a_clone = a.clone();
let a_addr = a_clone.as_ptr();
let mut b = RSliceMut::from(&mut a_clone[..]);
assert_eq!(&*a, &*b);
assert_eq!(&*a, &mut *b);
assert_eq!(a_addr, b.data());
assert_eq!(a.len(), b.len());
}
}
| RSliceMut |
main.rs | mod logging;
mod connection_pool;
mod http;
mod server;
mod orchestration;
mod routing;
mod agents;
mod configuration;
mod commands;
use std::net::{TcpListener, TcpStream};
use std::{fs, thread, time};
use std::fs::read_to_string;
use std::process::{Command, Output};
use std::sync::mpsc::channel;
use std::thread::Thread;
use crate::connection_pool::ConnectionPool;
use crate::logging::logging::{Log, Logger};
use crate::server::Server;
use regex::{Error, Regex};
use serde_json::{Map, Value};
use crate::agents::{Agent, MessageType};
use crate::commands::{format_output, run_command, run_command_static};
use crate::http::HttpResponse;
use crate::routing::{Route, RouteHandler, RouteMap};
use crate::configuration::*;
use crate::orchestration::{Aggregator, Orchestrator};
fn | () {
let jobs_config = JobsConfiguration::load("jobs.json".to_string()).unwrap();
let log = Log::create().unwrap();
let logger = log.get_logger();
let (job_sender, job_receiver) = channel();
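// Added note: the sender half is handed to the HTTP server below (through
// `Configuration::load`) so request handlers can queue jobs, while the
// spawned Orchestrator thread consumes them from `job_receiver`.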
let orch_logger = log.get_logger();
let aggregator = Aggregator::start(log.get_logger());
let orch_agg = aggregator.clone();
let _ = thread::spawn(|| {
Orchestrator::run(job_receiver, orch_agg, jobs_config, orch_logger)
});
match Configuration::load("config.json".to_string(), job_sender.clone()) {
Ok(config) => {
//println!("{:?}", jobs_config);
Server::start(config, log.get_logger())
}
Err(e) => {
println!("Error loading config: {}", e)
}
}
} | main |
tf_utils.py | import os
import tensorflow as tf
######################
### Graph creation ###
######################
def create_session_and_graph(gpu_device=None, gpu_frac=None):
if gpu_device is None:
gpu_device = 0
if gpu_frac is None:
gpu_frac = 0.95
os.environ["CUDA_VISIBLE_DEVICES"] = str(gpu_device)
tf_graph = tf.Graph()
if len(str(gpu_device)) > 0:
gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=gpu_frac)
config = tf.ConfigProto(
gpu_options=gpu_options,
log_device_placement=False,
allow_soft_placement=True,
)
else:
config = tf.ConfigProto(
device_count={'GPU': 0},
log_device_placement=False,
allow_soft_placement=True,
)
tf_sess = tf.Session(graph=tf_graph, config=config)
return tf_sess, tf_graph
##################
### Optimizing ###
##################
def minimize_and_clip(optimizer, objective, var_list, clip_val=10):
"""Minimized `objective` using `optimizer` w.r.t. variables in
`var_list` while ensure the norm of the gradients for each
variable is clipped to `clip_val`
"""
gradients = optimizer.compute_gradients(objective, var_list=var_list)
for i, (grad, var) in enumerate(gradients):
if grad is not None:
gradients[i] = (tf.clip_by_norm(grad, clip_val), var)
return optimizer.apply_gradients(gradients)
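# Illustrative usage sketch (added; `loss` and the optimizer choice are
# assumptions, not from the original file):
# optimizer = tf.train.AdamOptimizer(1e-3)
# train_op = minimize_and_clip(optimizer, loss,
#                              var_list=tf.trainable_variables(), clip_val=10)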
##################
### Operations ###
##################
def spatial_soft_argmax(features, dtype=tf.float32):
"""
features shape is [N, H, W, C]
"""
N = tf.shape(features)[0]
val_shape = features.get_shape()
H, W, C = val_shape[1].value, val_shape[2].value, val_shape[3].value
features = tf.reshape(
tf.transpose(features, [0, 3, 1, 2]),
[-1, H * W])
softmax = tf.nn.softmax(features)
spatial_softmax = tf.transpose(tf.reshape(softmax, [N, C, H, W]), [0, 2, 3, 1])
spatial_softmax_pos = tf.expand_dims(spatial_softmax, -1)
# TODO shape [H, W, 1, 2]
# TODO H or W is 1
assert(H != 1 and W != 1)
delta_h = 2. / tf.cast(H - 1, dtype)
delta_w = 2. / tf.cast(W - 1, dtype)
ran_h = tf.tile(tf.expand_dims(tf.range(-1., 1. + delta_h, delta_h, dtype=dtype), 1), [1, W])
ran_w = tf.tile(tf.expand_dims(tf.range(-1., 1 + delta_w, delta_w, dtype=dtype), 0), [H, 1])
image_pos = tf.expand_dims(tf.stack([ran_h, ran_w], 2), 2)
spatial_soft_amax = tf.reduce_sum(spatial_softmax_pos * image_pos, axis=[1, 2])
shaped_ssamax = tf.reshape(spatial_soft_amax, [N, C * 2])
return shaped_ssamax
def repeat_2d(x, reps, axis):
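"""
Repeats rows (axis=0) or columns (axis=1) of x `reps` times consecutively,
e.g. rows [a, b] with reps=2 become [a, a, b, b].
"""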
assert(axis == 0 or axis == 1)
if axis == 1:
x = tf.transpose(x)
static_shape = list(x.get_shape())
dyn_shape = tf.shape(x)
x_repeat = tf.reshape(tf.tile(x, [1, reps]), (dyn_shape[0] * reps, dyn_shape[1]))
if static_shape[0].value is not None:
static_shape[0] = tf.Dimension(static_shape[0].value * reps)
x_repeat.set_shape(static_shape)
if axis == 1:
x_repeat = tf.transpose(x_repeat)
return x_repeat
def | (X, Y):
"""
:param X: [N, U]
:param Y: [N, V]
"""
# tf.assert_equal(tf.shape(X)[0], tf.shape(Y)[0])
X_batch = tf.expand_dims(X, 2) # [N, U, 1]
Y_batch = tf.expand_dims(Y, 1) # [N, 1, V]
results = tf.batch_matmul(X_batch, Y_batch) # [N, U, V]
return results
def batch_outer_product_2d(X, Y):
"""
:param X: [N, U]
:param Y: [N, V]
:return [N, U * V]
"""
U = X.get_shape()[1].value
V = Y.get_shape()[1].value
assert(U is not None)
assert(V is not None)
X_tile = tf.tile(X, (1, V))
Y_repeat = repeat_2d(Y, U, 1)
return tf.multiply(X_tile, Y_repeat)
def gather_2d(x, idxs):
"""
:param x: 2d tensor
:param idxs: 1d tensor indexing the columns of x to gather
:return: 1d tensor
"""
assert(len(x.get_shape()) == 2)
tf.assert_equal(tf.shape(x)[0], tf.shape(idxs)[0])
idxs = tf.transpose(tf.pack([tf.range(tf.shape(idxs)[0]), idxs]))
x_gather = tf.gather_nd(x, idxs)
return x_gather
def block_diagonal(matrices, dtype=tf.float32):
"""Constructs block-diagonal matrices from a list of batched 2D tensors.
Args:
matrices: A list of Tensors with shape [..., N_i, M_i] (i.e. a list of
matrices with the same batch dimension).
dtype: Data type to use. The Tensors in `matrices` must match this dtype.
Returns:
A matrix with the input matrices stacked along its main diagonal, having
shape [..., \sum_i N_i, \sum_i M_i].
"""
matrices = [tf.convert_to_tensor(matrix, dtype=dtype) for matrix in matrices]
blocked_rows = tf.Dimension(0)
blocked_cols = tf.Dimension(0)
batch_shape = tf.TensorShape(None)
for matrix in matrices:
full_matrix_shape = matrix.get_shape().with_rank_at_least(2)
batch_shape = batch_shape.merge_with(full_matrix_shape[:-2])
blocked_rows += full_matrix_shape[-2]
blocked_cols += full_matrix_shape[-1]
ret_columns_list = []
for matrix in matrices:
matrix_shape = tf.shape(matrix)
ret_columns_list.append(matrix_shape[-1])
ret_columns = tf.add_n(ret_columns_list)
row_blocks = []
current_column = 0
for matrix in matrices:
matrix_shape = tf.shape(matrix)
row_before_length = current_column
current_column += matrix_shape[-1]
row_after_length = ret_columns - current_column
row_blocks.append(tf.pad(
tensor=matrix,
paddings=tf.concat(0,
[tf.zeros([tf.rank(matrix) - 1, 2], dtype=tf.int32),
[(row_before_length, row_after_length)]])))
blocked = tf.concat(-2, row_blocks)
blocked.set_shape(batch_shape.concatenate((blocked_rows, blocked_cols)))
return blocked
def sample_categorical(p):
# TODO: change to tf.distributions once the tf version is updated
dist = tf.contrib.distributions.Categorical(probs=p)
sample = dist.sample()
return sample
###############
### Asserts ###
###############
def assert_shape(tensor, shape):
assert(len(tensor.get_shape()) == len(shape))
tensor_shape = tf.shape(tensor)
for i, s_i in enumerate(shape):
tf.assert_equal(tensor_shape[i], tf.cast(s_i, tf.int32))
def assert_equal_approx(tensor, value, eps=1e-5, name=None):
return tf.assert_equal(tf.cast(tf.abs(tensor - value) < 1e-5, tf.int32), 1, name=name)
if __name__ == '__main__':
import numpy as np
np.random.seed(0)
tf.set_random_seed(0)
### repeat_2d test
a = tf.constant(np.random.random((2, 4)))
a0 = repeat_2d(a, 2, 0)
a1 = repeat_2d(a, 2, 1)
sess = tf.Session()
a_eval, a0_eval, a1_eval = sess.run([a, a0, a1])
print('\nrepeat 2d test')
print('a:\n{0}'.format(a_eval))
print('a0\n{0}'.format(a0_eval))
print('a1\n{0}'.format(a1_eval))
### test batch outer
a = tf.constant(np.random.random((3, 2)))
b = tf.constant(np.random.randint(0, 2, (3, 2)).astype(np.float64))
ab_outer = tf.reshape(batch_outer_product(b, a), (a.get_shape()[0].value, -1))
ab_outer_2d = batch_outer_product_2d(a, b)
a_eval, b_eval, ab_outer_eval, ab_outer_2d_eval = sess.run([a, b, ab_outer, ab_outer_2d])
print('\nbatch outer test')
print('a:\n{0}'.format(a_eval))
print('b:\n{0}'.format(b_eval))
print('ab_outer:\n{0}'.format(ab_outer_eval))
print('ab_outer_2d:\n{0}'.format(ab_outer_2d_eval))
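### block diagonal test (added illustrative check; shapes chosen arbitrarily)
m1 = tf.constant(np.random.random((2, 2)))
m2 = tf.constant(np.random.random((1, 1)))
blocked = block_diagonal([m1, m2], dtype=tf.float64)
blocked_eval = sess.run(blocked)
print('\nblock diagonal test')
print('blocked:\n{0}'.format(blocked_eval))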
| batch_outer_product |
t024_test.py | import os
import numpy as np
import pytest
from ci_framework import FlopyTestSetup, base_test_dir
import flopy
base_dir = base_test_dir(__file__, rel_path="temp", verbose=True)
ex_pth = os.path.join("..", "examples", "data", "mf2005_test")
testmodels = [
os.path.join(ex_pth, f) for f in os.listdir(ex_pth) if f.endswith(".nam")
]
@pytest.mark.parametrize(
"namfile",
testmodels,
)
def test_checker_on_load(namfile):
# load all of the models in the mf2005_test folder
# model level checks are performed by default on load()
checker_on_load(namfile)
def checker_on_load(mfnam):
f = os.path.basename(mfnam)
d = os.path.dirname(mfnam)
m = flopy.modflow.Modflow.load(f, model_ws=d)
assert isinstance(
m, flopy.modflow.Modflow
), "Not a flopy.modflow.Modflow instance"
def test_bcs_check():
|
def test_properties_check():
# test that storage values ignored for steady state
model_ws = f"{base_dir}_test_properties_check"
test_setup = FlopyTestSetup(verbose=True, test_dirs=model_ws)
mf = flopy.modflow.Modflow(
version="mf2005",
model_ws=model_ws,
)
dis = flopy.modflow.ModflowDis(
mf,
nrow=2,
ncol=2,
top=np.array([[100, np.nan], [100, 100]]),
nper=3,
steady=True,
)
chk = dis.check()
assert len(chk.summary_array) == 1
kij = (
chk.summary_array["k"][0],
chk.summary_array["i"][0],
chk.summary_array["j"][0],
)
assert kij == (0, 0, 1)
lpf = flopy.modflow.ModflowLpf(mf, sy=np.ones((2, 2)), ss=np.ones((2, 2)))
chk = lpf.check()
assert len(chk.summary_array) == 0
# test k values check
lpf = flopy.modflow.ModflowLpf(
mf,
hk=np.array([[1, 1e10], [1, -1]]),
hani=np.array([[1, 1], [1, -1]]),
vka=np.array([[1e10, 0], [1, 1e-20]]),
)
chk = lpf.check()
ind1 = np.array(
[
True if list(inds) == [0, 1, 1] else False
for inds in chk.view_summary_array_fields(["k", "i", "j"])
]
)
ind1_errors = chk.summary_array[ind1]["desc"]
ind2 = np.array(
[
True if list(inds) == [0, 0, 1] else False
for inds in chk.view_summary_array_fields(["k", "i", "j"])
]
)
ind2_errors = chk.summary_array[ind2]["desc"]
ind3 = np.array(
[
True if list(inds) == [0, 0, 0] else False
for inds in chk.view_summary_array_fields(["k", "i", "j"])
]
)
ind3_errors = chk.summary_array[ind3]["desc"]
assert (
"zero or negative horizontal hydraulic conductivity values"
in ind1_errors
)
assert (
"horizontal hydraulic conductivity values below checker threshold of 1e-11"
in ind1_errors
)
assert "negative horizontal anisotropy values" in ind1_errors
assert (
"vertical hydraulic conductivity values below checker threshold of 1e-11"
in ind1_errors
)
assert (
"horizontal hydraulic conductivity values above checker threshold of 100000.0"
in ind2_errors
)
assert (
"zero or negative vertical hydraulic conductivity values"
in ind2_errors
)
assert (
"vertical hydraulic conductivity values above checker threshold of 100000.0"
in ind3_errors
)
def test_oc_check():
m = flopy.modflow.Modflow()
oc = flopy.modflow.mfoc.ModflowOc(m)
chk = oc.check()
assert len(chk.summary_array) == 1, len(chk.summary_array)
assert "DIS package not available" in chk.summary_array[0]["desc"]
flopy.modflow.ModflowDis(m)
oc.stress_period_data = {(0, 0): ["save head", "save budget"]}
chk = oc.check() # check passed
assert len(chk.summary_array) == 0, len(chk.summary_array)
oc.stress_period_data = {(0, 0): ["save"]}
chk = oc.check()
assert len(chk.summary_array) == 1, len(chk.summary_array)
assert "too few words" in chk.summary_array[0]["desc"]
oc.stress_period_data = {(0, 0): ["save it"]}
chk = oc.check()
assert len(chk.summary_array) == 1, len(chk.summary_array)
assert "action 'save it' ignored" in chk.summary_array[0]["desc"]
oc.stress_period_data = {(1, 1): ["save head", "save budget"]}
chk = oc.check()
assert len(chk.summary_array) == 1, len(chk.summary_array)
assert "OC stress_period_data ignored" in chk.summary_array[0]["desc"]
if __name__ == "__main__":
print(f"numpy version: {np.__version__}")
for mfnam in testmodels:
checker_on_load(mfnam)
test_bcs_check()
test_properties_check()
test_oc_check()
| model_ws = f"{base_dir}_test_bcs_check"
test_setup = FlopyTestSetup(verbose=True, test_dirs=model_ws)
mf = flopy.modflow.Modflow(version="mf2005", model_ws=model_ws)
# test check for isolated cells
dis = flopy.modflow.ModflowDis(
mf, nlay=2, nrow=3, ncol=3, top=100, botm=95
)
bas = flopy.modflow.ModflowBas(mf, ibound=np.ones((2, 3, 3), dtype=int))
chk = bas.check()
dis = flopy.modflow.ModflowDis(
mf, nlay=3, nrow=5, ncol=5, top=100, botm=95
)
ibound = np.zeros((3, 5, 5), dtype=int)
ibound[1, 1, 1] = 1 # fully isolated cell
ibound[0:2, 4, 4] = 1 # cell connected vertically to one other cell
bas = flopy.modflow.ModflowBas(mf, ibound=ibound)
mf._mg_resync = True
chk = bas.check()
assert chk.summary_array["desc"][0] == "isolated cells in ibound array"
assert (
chk.summary_array.i[0] == 1
and chk.summary_array.i[0] == 1
and chk.summary_array.j[0] == 1
)
assert len(chk.summary_array) == 1
ghb = flopy.modflow.ModflowGhb(
mf, stress_period_data={0: [0, 0, 0, 100, 1]}
)
riv = flopy.modflow.ModflowRiv(
mf,
stress_period_data={
0: [[0, 0, 0, 101, 10, 100], [0, 0, 1, 80, 10, 90]]
},
)
chk = ghb.check()
assert chk.summary_array["desc"][0] == "BC in inactive cell"
chk = riv.check()
assert chk.summary_array["desc"][4] == "RIV stage below rbots"
assert np.array_equal(chk.summary_array["j"], np.array([0, 1, 1, 1, 1])) |
input_manager.rs | pub use ActionState as InputActionState;
use amethyst::input::{BindingTypes, InputHandler};
use std::collections::HashMap;
use std::hash::Hash;
type AxisValue = f32;
#[derive(Clone, Copy, PartialEq)]
pub enum ActionState {
Down,
Up,
Pressed,
None,
}
impl Default for ActionState {
fn default() -> Self {
ActionState::None
}
}
/// Manages input actions.
/// Stores data about which actions are _down_, _up_, or being _pressed_.
pub struct InputManager<B>
where
B: BindingTypes + Eq + Hash,
{
actions: HashMap<B::Action, ActionState>,
axes: HashMap<B::Axis, AxisValue>,
}
impl<B> Default for InputManager<B>
where
B: BindingTypes + Eq + Hash,
{
fn default() -> Self {
Self {
actions: HashMap::new(),
axes: HashMap::new(),
}
}
}
impl<B> InputManager<B>
where
B: BindingTypes + Eq + Hash,
{
/// Creates a new `InputManager`.
pub fn new() -> Self {
Self {
actions: HashMap::new(),
axes: HashMap::new(),
}
}
/// Returns `true` if the action with the given name was pressed _down_.
pub fn | (&self, action: B::Action) -> bool {
self.is_action_in_state(action, ActionState::Down)
}
/// Returns `true` if the action with the given name was released (_up_).
pub fn is_up(&self, action: B::Action) -> bool {
self.is_action_in_state(action, ActionState::Up)
}
/// Returns `true` if the action with the given name is being _pressed_ down,
/// or if it was pressed _down_.
pub fn is_pressed(&self, action: B::Action) -> bool {
self.is_action_in_state(action.clone(), ActionState::Pressed)
|| self.is_action_in_state(action, ActionState::Down)
}
/// Behaves identically to `amethyst::input::InputHandler::axis_value`.
pub fn axis_value(&self, axis: B::Axis) -> Option<AxisValue> {
self.axes.get(&axis).map(Clone::clone)
}
/// Similar to `axis_value`, but instead of passing a specific axis string ID,
/// pass a function, which is called with every registered axis ID and value; the function returns a boolean;
/// when the function returns `true`, then return the axis value of that axis.
pub fn axis_value_find<F>(&self, find_func: F) -> Option<AxisValue>
where
F: Fn(&(&B::Axis, &AxisValue)) -> bool,
{
self.axes.iter().find(find_func).map(|(_, value)| *value)
}
/// Calls `fun` once for every action that is currently in `target_state`.
// TODO: REFACTOR
pub fn actions_for_each<F>(&self, target_state: ActionState, fun: F)
where
F: FnMut(&B::Action),
{
self.actions
.iter()
.filter_map(|(action, state)| {
if state == &target_state {
Some(action)
} else {
None
}
})
.for_each(fun);
}
fn is_action_in_state(
&self,
action: B::Action,
state: ActionState,
) -> bool {
if let Some(s) = self.actions.get(&action) {
s == &state
} else {
false
}
}
/// This method is called every frame, by the `InputManagerSystem`.
pub fn update(&mut self, input: &InputHandler<B>) {
self.update_actions(input);
self.update_axes(input);
}
fn update_actions(&mut self, input: &InputHandler<B>) {
for action in input.bindings.actions() {
let state = self
.actions
.entry(action.clone())
.or_insert(ActionState::default());
if let Some(is_down) = input.action_is_down(&action) {
if is_down {
// IS DOWN
*state = match state {
// Was previously `Down` or `Pressed`, becomes or stays `Pressed`, as it is still pressed down.
ActionState::Down | ActionState::Pressed => {
ActionState::Pressed
}
// Was previously `Up` or `None`, becomes `Down`, as it is now newly pressed.
ActionState::Up | ActionState::None => {
ActionState::Down
}
};
} else {
// NOT DOWN
*state = match state {
// Was previously `Down` or `Pressed`, becomes `Up`, as it is no longer pressed.
ActionState::Down | ActionState::Pressed => {
ActionState::Up
}
// Was previously `Up` or `None`, becomes or stays `None`.
ActionState::Up | ActionState::None => {
ActionState::None
}
};
}
} else {
panic!(format!("Action should exist: {:?}", action));
}
}
}
fn update_axes(&mut self, input: &InputHandler<B>) {
for axis in input.bindings.axes() {
if let Some(value) = input.axis_value(axis) {
self.axes.insert(axis.clone(), value);
} else {
panic!(format!("Axis should exist: {:?}", axis));
}
}
}
}
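// Added summary of the per-frame state machine in `update_actions` above:
//
//   previous state \ input        down       not down
//   Down | Pressed           ->   Pressed    Up
//   Up   | None              ->   Down       None
//
// `Down` and `Up` are therefore edge states that last exactly one update.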
| is_down |
pubsub.py | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0.
import argparse
from awscrt import io, mqtt, auth, http
from awsiot import mqtt_connection_builder
import sys
import threading
import time
from uuid import uuid4
import json
# This sample uses the Message Broker for AWS IoT to send and receive messages
# through an MQTT connection. On startup, the device connects to the server,
# subscribes to a topic, and begins publishing messages to that topic.
# The device should receive those same messages back from the message broker,
# since it is subscribed to that same topic.
parser = argparse.ArgumentParser(description="Send and receive messages through an MQTT connection.")
parser.add_argument('--endpoint', required=True, help="Your AWS IoT custom endpoint, not including a port. " +
"Ex: \"abcd123456wxyz-ats.iot.us-east-1.amazonaws.com\"")
parser.add_argument('--cert', help="File path to your client certificate, in PEM format.")
parser.add_argument('--key', help="File path to your private key, in PEM format.")
parser.add_argument('--root-ca', help="File path to root certificate authority, in PEM format. " +
"Necessary if MQTT server uses a certificate that's not already in " +
"your trust store.")
parser.add_argument('--client-id', default="test-" + str(uuid4()), help="Client ID for MQTT connection.")
parser.add_argument('--topic', default="test/topic", help="Topic to subscribe to, and publish messages to.")
parser.add_argument('--message', default="Hello World!", help="Message to publish. " +
"Specify empty string to publish nothing.")
parser.add_argument('--count', default=10, type=int, help="Number of messages to publish/receive before exiting. " +
"Specify 0 to run forever.")
parser.add_argument('--use-websocket', default=False, action='store_true',
help="To use a websocket instead of raw mqtt. If you " +
"specify this option you must specify a region for signing, you can also enable proxy mode.")
parser.add_argument('--signing-region', default='us-east-1', help="If you specify --use-websocket, this " +
"is the region that will be used for computing the Sigv4 signature")
parser.add_argument('--proxy-host', help="Hostname for proxy to connect to. Note: if you use this feature, " +
"you will likely need to set --root-ca to the ca for your proxy.")
parser.add_argument('--proxy-port', type=int, default=8080, help="Port for proxy to connect to.")
parser.add_argument('--verbosity', choices=[x.name for x in io.LogLevel], default=io.LogLevel.NoLogs.name,
help='Logging level')
parser.add_argument('--interval', type=int, default=1)
parser.add_argument('--devicename', default='')
# Using globals to simplify sample code
args = parser.parse_args()
io.init_logging(getattr(io.LogLevel, args.verbosity), 'stderr')
received_count = 0
received_all_event = threading.Event()
# Callback when connection is accidentally lost.
def on_connection_interrupted(connection, error, **kwargs):
print("Connection interrupted. error: {}".format(error))
# Callback when an interrupted connection is re-established.
def on_connection_resumed(connection, return_code, session_present, **kwargs):
print("Connection resumed. return_code: {} session_present: {}".format(return_code, session_present))
if return_code == mqtt.ConnectReturnCode.ACCEPTED and not session_present:
print("Session did not persist. Resubscribing to existing topics...")
resubscribe_future, _ = connection.resubscribe_existing_topics()
# Cannot synchronously wait for resubscribe result because we're on the connection's event-loop thread,
# evaluate result with a callback instead.
resubscribe_future.add_done_callback(on_resubscribe_complete)
def on_resubscribe_complete(resubscribe_future):
resubscribe_results = resubscribe_future.result()
print("Resubscribe results: {}".format(resubscribe_results))
for topic, qos in resubscribe_results['topics']:
if qos is None:
sys.exit("Server rejected resubscribe to topic: {}".format(topic))
# Callback when the subscribed topic receives a message
def on_message_received(topic, payload, dup, qos, retain, **kwargs):
print("Received message from topic '{}': {}".format(topic, payload))
global received_count
received_count += 1
if received_count == args.count:
received_all_event.set()
if __name__ == '__main__':
# Spin up resources
event_loop_group = io.EventLoopGroup(1)
host_resolver = io.DefaultHostResolver(event_loop_group)
client_bootstrap = io.ClientBootstrap(event_loop_group, host_resolver)
if args.use_websocket == True:
proxy_options = None
if (args.proxy_host):
proxy_options = http.HttpProxyOptions(host_name=args.proxy_host, port=args.proxy_port) | mqtt_connection = mqtt_connection_builder.websockets_with_default_aws_signing(
endpoint=args.endpoint,
client_bootstrap=client_bootstrap,
region=args.signing_region,
credentials_provider=credentials_provider,
websocket_proxy_options=proxy_options,
ca_filepath=args.root_ca,
on_connection_interrupted=on_connection_interrupted,
on_connection_resumed=on_connection_resumed,
client_id=args.client_id,
clean_session=False,
keep_alive_secs=6)
else:
mqtt_connection = mqtt_connection_builder.mtls_from_path(
endpoint=args.endpoint,
cert_filepath=args.cert,
pri_key_filepath=args.key,
client_bootstrap=client_bootstrap,
ca_filepath=args.root_ca,
on_connection_interrupted=on_connection_interrupted,
on_connection_resumed=on_connection_resumed,
client_id=args.client_id,
clean_session=False,
keep_alive_secs=6)
print("Connecting to {} with client ID '{}'...".format(
args.endpoint, args.client_id))
connect_future = mqtt_connection.connect()
# Future.result() waits until a result is available
connect_future.result()
print("Connected!")
# Subscribe
print("Subscribing to topic '{}'...".format(args.topic))
subscribe_future, packet_id = mqtt_connection.subscribe(
topic=args.topic,
qos=mqtt.QoS.AT_LEAST_ONCE,
callback=on_message_received)
subscribe_result = subscribe_future.result()
print("Subscribed with {}".format(str(subscribe_result['qos'])))
# Publish message to server desired number of times.
# This step is skipped if message is blank.
# This step loops forever if count was set to 0.
if args.message:
if args.count == 0:
print ("Sending messages until program killed")
else:
print ("Sending {} message(s)".format(args.count))
publish_count = 1
while (publish_count <= args.count) or (args.count == 0):
jsonMessage = {"device_name": args.devicename, "data": {"temperature": 79.5, "humidity": 0.45} }
jsonData = json.dumps(jsonMessage)
print("Publishing message to topic '{}': {}".format(args.topic, jsonData))
mqtt_connection.publish(
topic=args.topic,
payload=jsonData,
qos=mqtt.QoS.AT_LEAST_ONCE)
time.sleep(args.interval)
publish_count += 1
# Wait for all messages to be received.
# This waits forever if count was set to 0.
if args.count != 0 and not received_all_event.is_set():
print("Waiting for all messages to be received...")
received_all_event.wait()
print("{} message(s) received.".format(received_count))
# Disconnect
print("Disconnecting...")
disconnect_future = mqtt_connection.disconnect()
disconnect_future.result()
print("Disconnected!") |
credentials_provider = auth.AwsCredentialsProvider.new_default_chain(client_bootstrap) |
testOverRecursed5.js | JSON.__proto__[1] = Uint8ClampedArray().buffer
f = (function() {
function | (c) {
Object.freeze(c).__proto__ = c
}
for each(b in []) {
try {
g(b)
} catch (e) {}
}
})
f()
f()
| g |
node.go | /*
* Copyright NetApp Inc, 2021 All rights reserved
*/
package node
import (
"bytes"
"encoding/xml"
"fmt"
"goharvest2/pkg/util"
"regexp"
"strings"
)
type Node struct {
parent *Node
name []byte
XMLName xml.Name
Attrs []xml.Attr `xml:",any,attr"`
Content []byte `xml:",innerxml"`
Children []*Node `xml:",any"`
}
func New(name []byte) *Node {
return &Node{name: name}
}
func NewS(name string) *Node {
return New([]byte(name))
}
func NewXml(name []byte) *Node {
return NewXmlS(string(name))
}
func NewXmlS(name string) *Node |
func (n *Node) GetXmlNameS() string {
return n.XMLName.Local
}
func (n *Node) SetXmlNameS(name string) {
n.XMLName = xml.Name{Local: name}
}
func (n *Node) GetName() []byte {
if name := n.GetXmlNameS(); name != "" {
return []byte(name)
}
return n.name
}
func (n *Node) GetNameS() string {
if name := n.GetXmlNameS(); name != "" {
return name
}
return string(n.name)
}
func (n *Node) SetName(name []byte) {
n.name = name
}
func (n *Node) SetNameS(name string) {
n.name = []byte(name)
}
func (n *Node) GetParent() *Node {
return n.parent
}
func (n *Node) GetAttr(name string) (xml.Attr, bool) {
var attr xml.Attr
for _, attr = range n.Attrs {
if attr.Name.Local == name {
return attr, true
}
}
return attr, false
}
func (n *Node) GetAttrValueS(name string) (string, bool) {
if attr, ok := n.GetAttr(name); ok {
return attr.Value, true
}
return "", false
}
func (n *Node) AddAttr(attr xml.Attr) {
n.Attrs = append(n.Attrs, attr)
}
func (n *Node) NewAttrS(name, value string) {
n.AddAttr(xml.Attr{Name: xml.Name{Local: name}, Value: value})
}
func (n *Node) GetChildren() []*Node {
return n.Children
}
func (n *Node) GetChild(name []byte) *Node {
for _, child := range n.Children {
if bytes.Equal(child.GetName(), name) {
return child
}
}
return nil
}
func (n *Node) GetChildS(name string) *Node {
for _, child := range n.Children {
if child.GetNameS() == name {
return child
}
}
return nil
}
func (n *Node) HasChild(name []byte) bool {
return n.GetChild(name) != nil
}
func (n *Node) HasChildS(name string) bool {
return n.GetChildS(name) != nil
}
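// PopChild removes and returns the first child with the given name, or nil if
// absent; removal swaps in the last child, so child order is not preserved.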
func (n *Node) PopChild(name []byte) *Node {
for i, child := range n.Children {
if bytes.Equal(child.GetName(), name) {
n.Children[i] = n.Children[len(n.Children)-1]
n.Children = n.Children[:len(n.Children)-1]
return child
}
}
return nil
}
func (n *Node) PopChildS(name string) *Node {
return n.PopChild([]byte(name))
}
func (n *Node) NewChild(name, content []byte) *Node {
var child *Node
if n.GetXmlNameS() != "" {
child = NewXml(name)
} else {
child = New(name)
}
child.parent = n
child.Content = content
n.AddChild(child)
return child
}
func (n *Node) NewChildS(name, content string) *Node {
return n.NewChild([]byte(name), []byte(content))
}
func (n *Node) AddChild(child *Node) {
n.Children = append(n.Children, child)
}
func (n *Node) GetContent() []byte {
if content := bytes.TrimSpace(n.Content); len(content) != 0 {
if content[0] != '<' {
return content
}
}
return []byte("")
}
func (n *Node) GetContentS() string {
return string(n.Content)
}
/*
func (n *Node) GetContentIfHas() []byte {
content, _ := n.GetContent()
return content
}
func (n *Node) GetContentIfHasS() string {
return string(GetContentIfHas())
}*/
func (n *Node) GetChildContent(name []byte) []byte {
if child := n.GetChild(name); child != nil {
return child.GetContent()
}
return []byte("")
}
func (n *Node) GetChildContentS(name string) string {
if child := n.GetChildS(name); child != nil {
return child.GetContentS()
}
return ""
}
// GetChildByContent returns the first child whose content equals content, or nil if none matches.
func (n *Node) GetChildByContent(content string) *Node {
for _, child := range n.Children {
if child.GetContentS() == content {
return child
}
}
return nil
}
func (n *Node) SetChildContentS(name, content string) {
if child := n.GetChildS(name); child != nil {
child.SetContentS(content)
} else {
n.NewChildS(name, content)
}
}
func (n *Node) GetAllChildContentS() []string {
content := make([]string, 0)
for _, ch := range n.Children {
content = append(content, ch.GetContentS())
}
return content
}
func (n *Node) GetAllChildNamesS() []string {
names := make([]string, 0)
for _, ch := range n.Children {
names = append(names, ch.GetNameS())
}
return names
}
func (n *Node) SetContent(content []byte) {
n.Content = content
}
func (n *Node) SetContentS(content string) {
n.SetContent([]byte(content))
}
func (n *Node) Copy() *Node {
var clone *Node
if n.GetXmlNameS() != "" {
clone = NewXml(n.GetName())
} else {
clone = New(n.GetName())
}
clone.SetContent(n.GetContent())
for _, child := range n.Children {
clone.Children = append(clone.Children, child.Copy())
}
return clone
}
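// Union merges source into n in place: children missing from n are adopted,
// children with their own children are merged recursively, and leaf values
// from source overwrite the corresponding values in n.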
func (n *Node) Union(source *Node) {
if len(n.GetContent()) == 0 {
n.SetContent(source.GetContent())
}
for _, child := range source.Children {
if !n.HasChild(child.GetName()) {
n.AddChild(child)
} else if child.GetChildren() != nil {
// union at child level
n.GetChild(child.GetName()).Union(child)
} else {
// child template would take precedence over parent
n.SetChildContentS(child.GetNameS(), child.GetContentS())
}
}
}
// searchAncestor walks up the tree and returns the node whose parent is named ancestor, or nil if no such ancestor exists.
func (n *Node) searchAncestor(ancestor string) *Node {
if n == nil {
return nil
}
p := n.GetParent()
if p == nil {
return nil
}
if p != nil && p.GetNameS() == ancestor {
return n
}
return p.searchAncestor(ancestor)
}
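// PreprocessTemplate rewrites named children that have a "LabelAgent" ancestor:
// scalar content is moved into an anonymous child and then cleared, and the
// children are processed recursively.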
func (me *Node) PreprocessTemplate() {
for _, child := range me.Children {
mine := me.GetChild(child.GetName())
if mine != nil && len(child.GetName()) > 0 {
if mine.searchAncestor("LabelAgent") != nil {
if len(mine.GetContentS()) > 0 {
mine.NewChildS("", child.GetContentS())
mine.SetContentS("")
}
}
mine.PreprocessTemplate()
}
}
}
// Merge merges the subtemplate into the receiver, modifying the receiver in place.
// skipOverwrite is a read-only list of parent keys whose child values are appended to (comma-separated) rather than overwritten.
func (me *Node) Merge(subtemplate *Node, skipOverwrite []string) {
if subtemplate == nil {
return
}
if len(me.Content) == 0 {
me.Content = subtemplate.Content
}
for _, child := range subtemplate.Children {
mine := me.GetChild(child.GetName())
if len(child.GetName()) == 0 {
if mine != nil && mine.GetParent() != nil && mine.GetParent().GetChildByContent(child.GetContentS()) == nil {
mine.GetParent().AddChild(child)
} else {
if me.GetChildByContent(child.GetContentS()) == nil {
me.AddChild(child)
}
}
} else if mine == nil {
me.AddChild(child)
} else {
if mine.GetParent() != nil && util.Contains(skipOverwrite, mine.GetParent().GetNameS()) {
mine.SetContentS(mine.GetContentS() + "," + child.GetContentS())
} else {
mine.SetContentS(child.GetContentS())
}
mine.Merge(child, skipOverwrite)
}
}
}
func (n *Node) UnmarshalXML(dec *xml.Decoder, root xml.StartElement) error {
n.Attrs = root.Attr
type node Node
return dec.DecodeElement((*node)(n), &root)
}
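// FlatList appends to list one space-separated entry per leaf node under n,
// prefixing each entry with the names of its ancestors (the "counters" name
// is skipped).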
func (n *Node) FlatList(list *[]string, prefix string) {
if n == nil {
return
}
if len(n.Children) == 0 {
var sub string
if len(prefix) > 0 {
sub = prefix + " " + simpleName(n.GetContentS())
} else {
sub = simpleName(n.GetContentS())
}
*list = append(*list, sub)
} else {
nameS := n.GetNameS()
if len(nameS) > 0 && nameS != "counters" {
if prefix == "" {
prefix = nameS
} else {
prefix += " " + nameS
}
}
for _, child := range n.Children {
child.FlatList(list, prefix)
}
}
}
var wordRegex = regexp.MustCompile(`(\w|-)+`)
// simpleName returns the first word in the string s
// ignoring non-word characters. see node_test for examples
func simpleName(s string) string {
return wordRegex.FindString(s)
}
func (n *Node) Print(depth int) string {
builder := strings.Builder{}
n.printN(depth, &builder)
return builder.String()
}
func (n *Node) printN(depth int, b *strings.Builder) {
name := "* "
content := " *"
if n.GetNameS() != "" {
name = n.GetNameS()
}
if len(n.GetContentS()) > 0 && n.GetContentS()[0] != '<' {
content = n.GetContentS()
}
fname := fmt.Sprintf("%s[%s]", strings.Repeat(" ", depth), name)
b.WriteString(fmt.Sprintf("%-50s - %35s\n", fname, content))
for _, child := range n.Children {
child.printN(depth+1, b)
}
}
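// SearchContent walks the subtree rooted at n and collects the content of
// every node whose path (starting at prefix[0]) matches one of paths; the
// boolean reports whether the number of matches equals the number of paths.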
func (n *Node) SearchContent(prefix []string, paths [][]string) ([]string, bool) {
//fmt.Printf("SearchContent: prefix=%v \t paths=%v\n", prefix, paths)
var search func(*Node, []string)
matches := make([]string, 0)
search = func(node *Node, current_path []string) {
var new_path []string
if len(current_path) > 0 || prefix[0] == node.GetNameS() {
new_path = append(current_path, node.GetNameS())
} else {
new_path = make([]string, len(current_path))
copy(new_path, current_path)
}
//fmt.Printf(" -> current_path=%v \t new_path=%v\n", current_path, new_path)
for _, path := range paths {
if util.EqualStringSlice(new_path, path) {
matches = append(matches, node.GetContentS())
//fmt.Println(" MATCH!")
break
}
}
if len(new_path) < util.MaxLen(paths) {
for _, child := range node.GetChildren() {
search(child, new_path)
}
}
}
search(n, []string{})
//fmt.Printf("matches (%d):\n%v\n", len(matches), matches)
return matches, len(matches) == len(paths)
}
func (n *Node) SearchChildren(path []string) []*Node {
var search func(*Node, []string)
matches := make([]*Node, 0)
search = func(node *Node, current_path []string) {
var new_path []string
if len(current_path) > 0 || path[0] == node.GetNameS() {
new_path = append(current_path, node.GetNameS())
} else {
new_path = make([]string, len(current_path))
copy(new_path, current_path)
}
if util.EqualStringSlice(new_path, path) {
matches = append(matches, node)
} else if len(new_path) < len(path) {
for _, child := range node.GetChildren() {
search(child, new_path)
}
}
}
search(n, []string{})
return matches
}
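// DecodeHtml replaces common XML entities with their literal characters and,
// in addition, rewrites spaces and hyphens to underscores (not entity
// escapes, but wanted by callers of this helper).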
func DecodeHtml(x string) string {
x = strings.ReplaceAll(x, "&", "&")
x = strings.ReplaceAll(x, "<", "<")
x = strings.ReplaceAll(x, ">", ">")
x = strings.ReplaceAll(x, "'", "'")
x = strings.ReplaceAll(x, """, "\"")
x = strings.ReplaceAll(x, " ", "_") // not escape char, but wanted
x = strings.ReplaceAll(x, "-", "_")
return x
}
| {
// ugly solution to support xml
return &Node{XMLName: xml.Name{Local: name}}
} |
mod.rs | //! Trait Resolution. See the [rustc dev guide] for more information on how this works.
//!
//! [rustc dev guide]: https://rustc-dev-guide.rust-lang.org/traits/resolution.html
#[allow(dead_code)]
pub mod auto_trait;
mod chalk_fulfill;
pub mod codegen;
mod coherence;
mod const_evaluatable;
mod engine;
pub mod error_reporting;
mod fulfill;
pub mod misc;
mod object_safety;
mod on_unimplemented;
mod project;
pub mod query;
mod select;
mod specialize;
mod structural_match;
mod util;
pub mod wf;
use crate::infer::outlives::env::OutlivesEnvironment;
use crate::infer::{InferCtxt, RegionckMode, TyCtxtInferExt};
use crate::traits::error_reporting::InferCtxtExt as _;
use crate::traits::query::evaluate_obligation::InferCtxtExt as _;
use rustc_errors::ErrorReported;
use rustc_hir as hir;
use rustc_hir::def_id::DefId;
use rustc_middle::ty::fold::TypeFoldable;
use rustc_middle::ty::subst::{InternalSubsts, SubstsRef};
use rustc_middle::ty::{
self, GenericParamDefKind, ParamEnv, ToPredicate, Ty, TyCtxt, WithConstness,
};
use rustc_span::Span;
use std::fmt::Debug;
pub use self::FulfillmentErrorCode::*;
pub use self::ImplSource::*;
pub use self::ObligationCauseCode::*;
pub use self::SelectionError::*;
pub use self::coherence::{add_placeholder_note, orphan_check, overlapping_impls};
pub use self::coherence::{OrphanCheckErr, OverlapResult};
pub use self::engine::TraitEngineExt;
pub use self::fulfill::{FulfillmentContext, PendingPredicateObligation};
pub use self::object_safety::astconv_object_safety_violations;
pub use self::object_safety::is_vtable_safe_method;
pub use self::object_safety::MethodViolationCode;
pub use self::object_safety::ObjectSafetyViolation;
pub use self::on_unimplemented::{OnUnimplementedDirective, OnUnimplementedNote};
pub use self::project::{normalize, normalize_projection_type, normalize_to};
pub use self::select::{EvaluationCache, SelectionCache, SelectionContext};
pub use self::select::{EvaluationResult, IntercrateAmbiguityCause, OverflowError};
pub use self::specialize::specialization_graph::FutureCompatOverlapError;
pub use self::specialize::specialization_graph::FutureCompatOverlapErrorKind;
pub use self::specialize::{specialization_graph, translate_substs, OverlapError};
pub use self::structural_match::search_for_structural_match_violation;
pub use self::structural_match::NonStructuralMatchTy;
pub use self::util::{elaborate_predicates, elaborate_trait_ref, elaborate_trait_refs};
pub use self::util::{expand_trait_aliases, TraitAliasExpander};
pub use self::util::{
get_vtable_index_of_object_method, impl_item_is_final, predicate_for_trait_def, upcast_choices,
};
pub use self::util::{
supertrait_def_ids, supertraits, transitive_bounds, SupertraitDefIds, Supertraits,
};
pub use self::chalk_fulfill::FulfillmentContext as ChalkFulfillmentContext;
pub use rustc_infer::traits::*;
/// Whether to skip the leak check, as part of a future compatibility warning step.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub enum SkipLeakCheck {
Yes,
No,
}
impl SkipLeakCheck {
fn is_yes(self) -> bool {
self == SkipLeakCheck::Yes
}
}
/// The "default" for skip-leak-check corresponds to the current
/// behavior (do not skip the leak check) -- not the behavior we are
/// transitioning into.
impl Default for SkipLeakCheck {
fn default() -> Self {
SkipLeakCheck::No
}
}
/// The mode that trait queries run in.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub enum TraitQueryMode {
// Standard/un-canonicalized queries get accurate
// spans etc. passed in and hence can do reasonable
// error reporting on their own.
Standard,
// Canonicalized queries get dummy spans and hence
// must generally propagate errors to
// pre-canonicalization callsites.
Canonical,
}
/// Creates predicate obligations from the generic bounds.
pub fn predicates_for_generics<'tcx>(
cause: ObligationCause<'tcx>,
param_env: ty::ParamEnv<'tcx>,
generic_bounds: ty::InstantiatedPredicates<'tcx>,
) -> impl Iterator<Item = PredicateObligation<'tcx>> {
util::predicates_for_generics(cause, 0, param_env, generic_bounds)
}
/// Determines whether the type `ty` is known to meet `bound` and
/// returns true if so. Returns false if `ty` either does not meet
/// `bound` or is not known to meet bound (note that this is
/// conservative towards *no impl*, which is the opposite of the
/// `evaluate` methods).
pub fn type_known_to_meet_bound_modulo_regions<'a, 'tcx>(
infcx: &InferCtxt<'a, 'tcx>,
param_env: ty::ParamEnv<'tcx>,
ty: Ty<'tcx>,
def_id: DefId,
span: Span,
) -> bool {
debug!(
"type_known_to_meet_bound_modulo_regions(ty={:?}, bound={:?})",
ty,
infcx.tcx.def_path_str(def_id)
);
let trait_ref = ty::TraitRef { def_id, substs: infcx.tcx.mk_substs_trait(ty, &[]) };
let obligation = Obligation {
param_env,
cause: ObligationCause::misc(span, hir::CRATE_HIR_ID),
recursion_depth: 0,
predicate: trait_ref.without_const().to_predicate(infcx.tcx),
};
let result = infcx.predicate_must_hold_modulo_regions(&obligation);
debug!(
"type_known_to_meet_ty={:?} bound={} => {:?}",
ty,
infcx.tcx.def_path_str(def_id),
result
);
if result && ty.has_infer_types_or_consts() {
// Because of inference "guessing", selection can sometimes claim
// to succeed while the success requires a guess. To ensure
// this function's result remains infallible, we must confirm
// that guess. While imperfect, I believe this is sound.
// The handling of regions in this area of the code is terrible,
// see issue #29149. We should be able to improve on this with
// NLL.
let mut fulfill_cx = FulfillmentContext::new_ignoring_regions();
// We can use a dummy node-id here because we won't pay any mind
// to region obligations that arise (there shouldn't really be any
// anyhow).
let cause = ObligationCause::misc(span, hir::CRATE_HIR_ID);
fulfill_cx.register_bound(infcx, param_env, ty, def_id, cause);
// Note: we only assume something is `Copy` if we can
// *definitively* show that it implements `Copy`. Otherwise,
// assume it is move; linear is always ok.
match fulfill_cx.select_all_or_error(infcx) {
Ok(()) => {
debug!(
"type_known_to_meet_bound_modulo_regions: ty={:?} bound={} success",
ty,
infcx.tcx.def_path_str(def_id)
);
true
}
Err(e) => {
debug!(
"type_known_to_meet_bound_modulo_regions: ty={:?} bound={} errors={:?}",
ty,
infcx.tcx.def_path_str(def_id),
e
);
false
}
}
} else {
result
}
}
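/// Normalizes `predicates` inside a fresh inference context, reporting any
/// fulfillment or region errors, and returns the fully resolved predicates.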
fn do_normalize_predicates<'tcx>(
tcx: TyCtxt<'tcx>,
region_context: DefId,
cause: ObligationCause<'tcx>,
elaborated_env: ty::ParamEnv<'tcx>,
predicates: Vec<ty::Predicate<'tcx>>,
) -> Result<Vec<ty::Predicate<'tcx>>, ErrorReported> {
debug!(
"do_normalize_predicates(predicates={:?}, region_context={:?}, cause={:?})",
predicates, region_context, cause,
);
let span = cause.span;
tcx.infer_ctxt().enter(|infcx| {
// FIXME. We should really... do something with these region
// obligations. But this call just continues the older
// behavior (i.e., doesn't cause any new bugs), and it would
// take some further refactoring to actually solve them. In
// particular, we would have to handle implied bounds
// properly, and that code is currently largely confined to
// regionck (though I made some efforts to extract it
// out). -nmatsakis
//
// @arielby: In any case, these obligations are checked
// by wfcheck anyway, so I'm not sure we have to check
// them here too, and we will remove this function when
// we move over to lazy normalization *anyway*.
let fulfill_cx = FulfillmentContext::new_ignoring_regions();
let predicates =
match fully_normalize(&infcx, fulfill_cx, cause, elaborated_env, &predicates) {
Ok(predicates) => predicates,
Err(errors) => {
infcx.report_fulfillment_errors(&errors, None, false);
return Err(ErrorReported);
}
};
debug!("do_normalize_predictes: normalized predicates = {:?}", predicates);
// We can use the `elaborated_env` here; the region code only
// cares about declarations like `'a: 'b`.
let outlives_env = OutlivesEnvironment::new(elaborated_env);
infcx.resolve_regions_and_report_errors(
region_context,
&outlives_env,
RegionckMode::default(),
);
let predicates = match infcx.fully_resolve(&predicates) {
Ok(predicates) => predicates,
Err(fixup_err) => {
// If we encounter a fixup error, it means that some type
// variable wound up unconstrained. I actually don't know
// if this can happen, and I certainly don't expect it to
// happen often, but if it did happen it probably
// represents a legitimate failure due to some kind of
// unconstrained variable, and it seems better not to ICE,
// all things considered.
tcx.sess.span_err(span, &fixup_err.to_string());
return Err(ErrorReported);
}
};
if predicates.needs_infer() { | Ok(predicates)
}
})
}
// FIXME: this is gonna need to be removed ...
/// Normalizes the parameter environment, reporting errors if they occur.
pub fn normalize_param_env_or_error<'tcx>(
tcx: TyCtxt<'tcx>,
region_context: DefId,
unnormalized_env: ty::ParamEnv<'tcx>,
cause: ObligationCause<'tcx>,
) -> ty::ParamEnv<'tcx> {
// I'm not wild about reporting errors here; I'd prefer to
// have the errors get reported at a defined place (e.g.,
// during typeck). Instead I have all parameter
// environments, in effect, going through this function
// and hence potentially reporting errors. This ensures of
// course that we never forget to normalize (the
// alternative seemed like it would involve a lot of
// manual invocations of this fn -- and then we'd have to
// deal with the errors at each of those sites).
//
// In any case, in practice, typeck constructs all the
// parameter environments once for every fn as it goes,
// and errors will get reported then; so after typeck we
// can be sure that no errors should occur.
debug!(
"normalize_param_env_or_error(region_context={:?}, unnormalized_env={:?}, cause={:?})",
region_context, unnormalized_env, cause
);
let mut predicates: Vec<_> =
util::elaborate_predicates(tcx, unnormalized_env.caller_bounds().into_iter())
.map(|obligation| obligation.predicate)
.collect();
debug!("normalize_param_env_or_error: elaborated-predicates={:?}", predicates);
let elaborated_env =
ty::ParamEnv::new(tcx.intern_predicates(&predicates), unnormalized_env.reveal());
// HACK: we are trying to normalize the param-env inside *itself*. The problem is that
// normalization expects its param-env to be already normalized, which means we have
// a circularity.
//
// The way we handle this is by normalizing the param-env inside an unnormalized version
// of the param-env, which means that if the param-env contains unnormalized projections,
// we'll have some normalization failures. This is unfortunate.
//
// Lazy normalization would basically handle this by treating just the
// normalizing-a-trait-ref-requires-itself cycles as evaluation failures.
//
// Inferred outlives bounds can create a lot of `TypeOutlives` predicates for associated
// types, so to make the situation less bad, we normalize all the predicates *but*
// the `TypeOutlives` predicates first inside the unnormalized parameter environment, and
// then we normalize the `TypeOutlives` bounds inside the normalized parameter environment.
//
// This works fairly well because trait matching does not actually care about param-env
// TypeOutlives predicates - these are normally used by regionck.
let outlives_predicates: Vec<_> = predicates
.drain_filter(|predicate| match predicate.skip_binders() {
ty::PredicateAtom::TypeOutlives(..) => true,
_ => false,
})
.collect();
debug!(
"normalize_param_env_or_error: predicates=(non-outlives={:?}, outlives={:?})",
predicates, outlives_predicates
);
let non_outlives_predicates = match do_normalize_predicates(
tcx,
region_context,
cause.clone(),
elaborated_env,
predicates,
) {
Ok(predicates) => predicates,
// An unnormalized env is better than nothing.
Err(ErrorReported) => {
debug!("normalize_param_env_or_error: errored resolving non-outlives predicates");
return elaborated_env;
}
};
debug!("normalize_param_env_or_error: non-outlives predicates={:?}", non_outlives_predicates);
// Not sure whether it is better to include the unnormalized TypeOutlives predicates
// here. I believe they should not matter, because we are ignoring TypeOutlives param-env
// predicates here anyway. Keeping them here anyway because it seems safer.
let outlives_env: Vec<_> =
non_outlives_predicates.iter().chain(&outlives_predicates).cloned().collect();
let outlives_env =
ty::ParamEnv::new(tcx.intern_predicates(&outlives_env), unnormalized_env.reveal());
let outlives_predicates = match do_normalize_predicates(
tcx,
region_context,
cause,
outlives_env,
outlives_predicates,
) {
Ok(predicates) => predicates,
// An unnormalized env is better than nothing.
Err(ErrorReported) => {
debug!("normalize_param_env_or_error: errored resolving outlives predicates");
return elaborated_env;
}
};
debug!("normalize_param_env_or_error: outlives predicates={:?}", outlives_predicates);
let mut predicates = non_outlives_predicates;
predicates.extend(outlives_predicates);
debug!("normalize_param_env_or_error: final predicates={:?}", predicates);
ty::ParamEnv::new(tcx.intern_predicates(&predicates), unnormalized_env.reveal())
}
pub fn fully_normalize<'a, 'tcx, T>(
infcx: &InferCtxt<'a, 'tcx>,
mut fulfill_cx: FulfillmentContext<'tcx>,
cause: ObligationCause<'tcx>,
param_env: ty::ParamEnv<'tcx>,
value: &T,
) -> Result<T, Vec<FulfillmentError<'tcx>>>
where
T: TypeFoldable<'tcx>,
{
debug!("fully_normalize_with_fulfillcx(value={:?})", value);
let selcx = &mut SelectionContext::new(infcx);
let Normalized { value: normalized_value, obligations } =
project::normalize(selcx, param_env, cause, value);
debug!(
"fully_normalize: normalized_value={:?} obligations={:?}",
normalized_value, obligations
);
for obligation in obligations {
fulfill_cx.register_predicate_obligation(selcx.infcx(), obligation);
}
debug!("fully_normalize: select_all_or_error start");
fulfill_cx.select_all_or_error(infcx)?;
debug!("fully_normalize: select_all_or_error complete");
let resolved_value = infcx.resolve_vars_if_possible(&normalized_value);
debug!("fully_normalize: resolved_value={:?}", resolved_value);
Ok(resolved_value)
}
/// Normalizes the predicates and checks whether they hold in an empty environment. If this
/// returns true, then either normalize encountered an error or one of the predicates did not
/// hold. Used when creating vtables to check for unsatisfiable methods.
pub fn impossible_predicates<'tcx>(
tcx: TyCtxt<'tcx>,
predicates: Vec<ty::Predicate<'tcx>>,
) -> bool {
debug!("impossible_predicates(predicates={:?})", predicates);
let result = tcx.infer_ctxt().enter(|infcx| {
let param_env = ty::ParamEnv::reveal_all();
let mut selcx = SelectionContext::new(&infcx);
let mut fulfill_cx = FulfillmentContext::new();
let cause = ObligationCause::dummy();
let Normalized { value: predicates, obligations } =
normalize(&mut selcx, param_env, cause.clone(), &predicates);
for obligation in obligations {
fulfill_cx.register_predicate_obligation(&infcx, obligation);
}
for predicate in predicates {
let obligation = Obligation::new(cause.clone(), param_env, predicate);
fulfill_cx.register_predicate_obligation(&infcx, obligation);
}
fulfill_cx.select_all_or_error(&infcx).is_err()
});
debug!("impossible_predicates(predicates={:?}) = {:?}", predicates, result);
result
}
fn subst_and_check_impossible_predicates<'tcx>(
tcx: TyCtxt<'tcx>,
key: (DefId, SubstsRef<'tcx>),
) -> bool {
debug!("subst_and_check_impossible_predicates(key={:?})", key);
let mut predicates = tcx.predicates_of(key.0).instantiate(tcx, key.1).predicates;
predicates.retain(|predicate| !predicate.needs_subst());
let result = impossible_predicates(tcx, predicates);
debug!("subst_and_check_impossible_predicates(key={:?}) = {:?}", key, result);
result
}
/// Given a trait `trait_ref`, iterates the vtable entries
/// that come from `trait_ref`, including its supertraits.
#[inline] // FIXME(#35870): avoid closures being unexported due to `impl Trait`.
fn vtable_methods<'tcx>(
tcx: TyCtxt<'tcx>,
trait_ref: ty::PolyTraitRef<'tcx>,
) -> &'tcx [Option<(DefId, SubstsRef<'tcx>)>] {
debug!("vtable_methods({:?})", trait_ref);
tcx.arena.alloc_from_iter(supertraits(tcx, trait_ref).flat_map(move |trait_ref| {
let trait_methods = tcx
.associated_items(trait_ref.def_id())
.in_definition_order()
.filter(|item| item.kind == ty::AssocKind::Fn);
// Now list each method's DefId and InternalSubsts (for within its trait).
// If the method can never be called from this object, produce None.
trait_methods.map(move |trait_method| {
debug!("vtable_methods: trait_method={:?}", trait_method);
let def_id = trait_method.def_id;
// Some methods cannot be called on an object; skip those.
if !is_vtable_safe_method(tcx, trait_ref.def_id(), &trait_method) {
debug!("vtable_methods: not vtable safe");
return None;
}
// The method may have some early-bound lifetimes; add regions for those.
let substs = trait_ref.map_bound(|trait_ref| {
InternalSubsts::for_item(tcx, def_id, |param, _| match param.kind {
GenericParamDefKind::Lifetime => tcx.lifetimes.re_erased.into(),
GenericParamDefKind::Type { .. } | GenericParamDefKind::Const => {
trait_ref.substs[param.index as usize]
}
})
});
// The trait type may have higher-ranked lifetimes in it;
// erase them if they appear, so that we get the type
// at some particular call site.
let substs =
tcx.normalize_erasing_late_bound_regions(ty::ParamEnv::reveal_all(), &substs);
// It's possible that the method relies on where-clauses that
// do not hold for this particular set of type parameters.
// Note that this method could then never be called, so we
// do not want to try and codegen it, in that case (see #23435).
let predicates = tcx.predicates_of(def_id).instantiate_own(tcx, substs);
if impossible_predicates(tcx, predicates.predicates) {
debug!("vtable_methods: predicates do not hold");
return None;
}
Some((def_id, substs))
})
}))
}
/// Checks whether the type `ty` implements the given trait (`trait_def_id`).
///
/// NOTE: This always returns `false` for a type that still needs inference.
fn type_implements_trait<'tcx>(
tcx: TyCtxt<'tcx>,
key: (
DefId, // trait_def_id,
Ty<'tcx>, // type
SubstsRef<'tcx>,
ParamEnv<'tcx>,
),
) -> bool {
let (trait_def_id, ty, params, param_env) = key;
debug!(
"type_implements_trait: trait_def_id={:?}, type={:?}, params={:?}, param_env={:?}",
trait_def_id, ty, params, param_env
);
let trait_ref = ty::TraitRef { def_id: trait_def_id, substs: tcx.mk_substs_trait(ty, params) };
let obligation = Obligation {
cause: ObligationCause::dummy(),
param_env,
recursion_depth: 0,
predicate: trait_ref.without_const().to_predicate(tcx),
};
tcx.infer_ctxt().enter(|infcx| infcx.predicate_must_hold_modulo_regions(&obligation))
}
pub fn provide(providers: &mut ty::query::Providers) {
object_safety::provide(providers);
structural_match::provide(providers);
*providers = ty::query::Providers {
specialization_graph_of: specialize::specialization_graph_provider,
specializes: specialize::specializes,
codegen_fulfill_obligation: codegen::codegen_fulfill_obligation,
vtable_methods,
type_implements_trait,
subst_and_check_impossible_predicates,
mir_abstract_const: |tcx, def_id| {
let def_id = def_id.expect_local();
if let Some(def) = ty::WithOptConstParam::try_lookup(def_id, tcx) {
tcx.mir_abstract_const_of_const_arg(def)
} else {
const_evaluatable::mir_abstract_const(tcx, ty::WithOptConstParam::unknown(def_id))
}
},
mir_abstract_const_of_const_arg: |tcx, (did, param_did)| {
const_evaluatable::mir_abstract_const(
tcx,
ty::WithOptConstParam { did, const_param_did: Some(param_did) },
)
},
try_unify_abstract_consts: const_evaluatable::try_unify_abstract_consts,
..*providers
};
} | tcx.sess.delay_span_bug(span, "encountered inference variables after `fully_resolve`");
Err(ErrorReported)
} else { |
describe_eip_monitor_data.go | package ecs
//Licensed under the Apache License, Version 2.0 (the "License");
//you may not use this file except in compliance with the License.
//You may obtain a copy of the License at
//
//http://www.apache.org/licenses/LICENSE-2.0
//
//Unless required by applicable law or agreed to in writing, software
//distributed under the License is distributed on an "AS IS" BASIS,
//WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//See the License for the specific language governing permissions and
//limitations under the License.
//
// Code generated by Alibaba Cloud SDK Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
import (
"github.com/aliyun/alibaba-cloud-sdk-go/sdk/requests"
"github.com/aliyun/alibaba-cloud-sdk-go/sdk/responses"
)
// DescribeEipMonitorData invokes the ecs.DescribeEipMonitorData API synchronously
func (client *Client) DescribeEipMonitorData(request *DescribeEipMonitorDataRequest) (response *DescribeEipMonitorDataResponse, err error) {
response = CreateDescribeEipMonitorDataResponse()
err = client.DoAction(request, response)
return
}
// DescribeEipMonitorDataWithChan invokes the ecs.DescribeEipMonitorData API asynchronously
func (client *Client) DescribeEipMonitorDataWithChan(request *DescribeEipMonitorDataRequest) (<-chan *DescribeEipMonitorDataResponse, <-chan error) {
responseChan := make(chan *DescribeEipMonitorDataResponse, 1)
errChan := make(chan error, 1)
err := client.AddAsyncTask(func() {
defer close(responseChan)
defer close(errChan)
response, err := client.DescribeEipMonitorData(request)
if err != nil {
errChan <- err
} else {
responseChan <- response
}
})
if err != nil {
errChan <- err
close(responseChan)
close(errChan)
}
return responseChan, errChan
}
// DescribeEipMonitorDataWithCallback invokes the ecs.DescribeEipMonitorData API asynchronously
func (client *Client) DescribeEipMonitorDataWithCallback(request *DescribeEipMonitorDataRequest, callback func(response *DescribeEipMonitorDataResponse, err error)) <-chan int {
result := make(chan int, 1)
err := client.AddAsyncTask(func() {
var response *DescribeEipMonitorDataResponse
var err error
defer close(result)
response, err = client.DescribeEipMonitorData(request)
callback(response, err)
result <- 1
})
if err != nil {
defer close(result)
callback(nil, err)
result <- 0
}
return result
}
// DescribeEipMonitorDataRequest is the request struct for api DescribeEipMonitorData
type DescribeEipMonitorDataRequest struct {
*requests.RpcRequest
ResourceOwnerId requests.Integer `position:"Query" name:"ResourceOwnerId"`
AllocationId string `position:"Query" name:"AllocationId"`
StartTime string `position:"Query" name:"StartTime"`
Period requests.Integer `position:"Query" name:"Period"`
ResourceOwnerAccount string `position:"Query" name:"ResourceOwnerAccount"`
OwnerAccount string `position:"Query" name:"OwnerAccount"`
EndTime string `position:"Query" name:"EndTime"`
OwnerId requests.Integer `position:"Query" name:"OwnerId"`
}
// DescribeEipMonitorDataResponse is the response struct for api DescribeEipMonitorData
type DescribeEipMonitorDataResponse struct {
*responses.BaseResponse
RequestId string `json:"RequestId" xml:"RequestId"`
EipMonitorDatas EipMonitorDatasInDescribeEipMonitorData `json:"EipMonitorDatas" xml:"EipMonitorDatas"`
}
// CreateDescribeEipMonitorDataRequest creates a request to invoke DescribeEipMonitorData API
func CreateDescribeEipMonitorDataRequest() (request *DescribeEipMonitorDataRequest) {
request = &DescribeEipMonitorDataRequest{
RpcRequest: &requests.RpcRequest{},
}
request.InitWithApiInfo("Ecs", "2014-05-26", "DescribeEipMonitorData", "", "")
request.Method = requests.POST
return
}
// CreateDescribeEipMonitorDataResponse creates a response to parse from DescribeEipMonitorData response
func CreateDescribeEipMonitorDataResponse() (response *DescribeEipMonitorDataResponse) | {
response = &DescribeEipMonitorDataResponse{
BaseResponse: &responses.BaseResponse{},
}
return
} |
|
server.go | package server
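// IServer describes the minimal lifecycle of a server: start listening on an
// address and stop.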
type IServer interface {
Start(addr string) error
Stop() | } | |
baremetal_secrets.go | package provisioning
import (
"context"
"crypto/rand"
"fmt"
"math/big"
"strings"
"github.com/pkg/errors"
"golang.org/x/crypto/bcrypt"
corev1 "k8s.io/api/core/v1"
apierrors "k8s.io/apimachinery/pkg/api/errors"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/runtime"
coreclientv1 "k8s.io/client-go/kubernetes/typed/core/v1"
"sigs.k8s.io/controller-runtime/pkg/controller/controllerutil"
metal3iov1alpha1 "github.com/openshift/cluster-baremetal-operator/api/v1alpha1"
)
const (
baremetalSecretName = "metal3-mariadb-password" // #nosec
baremetalSecretKey = "password"
ironicUsernameKey = "username"
ironicPasswordKey = "password"
ironicHtpasswdKey = "htpasswd"
ironicConfigKey = "auth-config"
ironicSecretName = "metal3-ironic-password"
ironicrpcSecretName = "metal3-ironic-rpc-password" // #nosec
ironicrpcUsername = "rpc-user"
ironicUsername = "ironic-user"
inspectorSecretName = "metal3-ironic-inspector-password"
inspectorUsername = "inspector-user"
)
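// generateRandomPassword returns a 16-character alphanumeric password built
// from crypto/rand.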
func generateRandomPassword() (string, error) {
chars := []rune("ABCDEFGHIJKLMNOPQRSTUVWXYZ" +
"abcdefghijklmnopqrstuvwxyz" +
"0123456789")
length := 16
buf := make([]rune, length)
numChars := big.NewInt(int64(len(chars)))
for i := range buf {
c, err := rand.Int(rand.Reader, numChars)
if err != nil {
return "", err
}
buf[i] = chars[c.Uint64()]
}
return string(buf), nil
}
// CreateMariadbPasswordSecret creates a Secret for Mariadb password
func createMariadbPasswordSecret(client coreclientv1.SecretsGetter, targetNamespace string, baremetalConfig *metal3iov1alpha1.Provisioning, scheme *runtime.Scheme) error {
existing, err := client.Secrets(targetNamespace).Get(context.Background(), baremetalSecretName, metav1.GetOptions{})
if err == nil && len(existing.ObjectMeta.OwnerReferences) == 0 {
err = controllerutil.SetControllerReference(baremetalConfig, existing, scheme)
if err != nil {
return err
}
_, err = client.Secrets(targetNamespace).Update(context.Background(), existing, metav1.UpdateOptions{})
return err
}
if !apierrors.IsNotFound(err) {
return err
}
// Secret does not already exist. So, create one.
password, err := generateRandomPassword()
if err != nil {
return err
}
secret := &corev1.Secret{
ObjectMeta: metav1.ObjectMeta{
Name: baremetalSecretName,
Namespace: targetNamespace,
},
StringData: map[string]string{
baremetalSecretKey: password,
},
}
err = controllerutil.SetControllerReference(baremetalConfig, secret, scheme)
if err != nil {
return err
}
_, err = client.Secrets(targetNamespace).Create(context.Background(), secret, metav1.CreateOptions{})
return err
}
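// createIronicSecret ensures a Secret holding a random password, the matching
// htpasswd entry, and an auth-config snippet for the given config section,
// adopting any pre-existing Secret by setting its controller reference.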
func createIronicSecret(client coreclientv1.SecretsGetter, targetNamespace string, name string, username string, configSection string, baremetalConfig *metal3iov1alpha1.Provisioning, scheme *runtime.Scheme) error |
func CreateAllSecrets(client coreclientv1.SecretsGetter, targetNamespace string, baremetalConfig *metal3iov1alpha1.Provisioning, scheme *runtime.Scheme) error {
// Create a Secret for the Mariadb Password
if err := createMariadbPasswordSecret(client, targetNamespace, baremetalConfig, scheme); err != nil {
return errors.Wrap(err, "failed to create Mariadb password")
}
// Create a Secret for the Ironic Password
if err := createIronicSecret(client, targetNamespace, ironicSecretName, ironicUsername, "ironic", baremetalConfig, scheme); err != nil {
return errors.Wrap(err, "failed to create Ironic password")
}
// Create a Secret for the Ironic RPC Password
if err := createIronicSecret(client, targetNamespace, ironicrpcSecretName, ironicrpcUsername, "json_rpc", baremetalConfig, scheme); err != nil {
return errors.Wrap(err, "failed to create Ironic rpc password")
}
// Create a Secret for the Ironic Inspector Password
if err := createIronicSecret(client, targetNamespace, inspectorSecretName, inspectorUsername, "inspector", baremetalConfig, scheme); err != nil {
return errors.Wrap(err, "failed to create Inspector password")
}
return nil
}
func DeleteAllSecrets(info *ProvisioningInfo) error {
var secretErrors []string
if err := info.Client.CoreV1().Secrets(info.Namespace).Delete(context.Background(), baremetalSecretName, metav1.DeleteOptions{}); err != nil {
secretErrors = append(secretErrors, err.Error())
}
if err := info.Client.CoreV1().Secrets(info.Namespace).Delete(context.Background(), ironicSecretName, metav1.DeleteOptions{}); err != nil {
secretErrors = append(secretErrors, err.Error())
}
if err := info.Client.CoreV1().Secrets(info.Namespace).Delete(context.Background(), inspectorSecretName, metav1.DeleteOptions{}); err != nil {
secretErrors = append(secretErrors, err.Error())
}
if err := info.Client.CoreV1().Secrets(info.Namespace).Delete(context.Background(), ironicrpcSecretName, metav1.DeleteOptions{}); err != nil {
secretErrors = append(secretErrors, err.Error())
}
if len(secretErrors) > 0 {
return fmt.Errorf(strings.Join(secretErrors, "\n"))
} else {
return nil
}
}
| {
existing, err := client.Secrets(targetNamespace).Get(context.Background(), name, metav1.GetOptions{})
if err == nil && len(existing.ObjectMeta.OwnerReferences) == 0 {
err = controllerutil.SetControllerReference(baremetalConfig, existing, scheme)
if err != nil {
return err
}
_, err = client.Secrets(targetNamespace).Update(context.Background(), existing, metav1.UpdateOptions{})
return err
}
if !apierrors.IsNotFound(err) {
return err
}
// Secret does not already exist. So, create one.
password, err := generateRandomPassword()
if err != nil {
return err
}
hash, err := bcrypt.GenerateFromPassword([]byte(password), 5) // Use same cost as htpasswd default
if err != nil {
return err
}
// Change hash version from $2a$ to $2y$, as generated by htpasswd.
// These are equivalent for our purposes.
// Some background information about this : https://en.wikipedia.org/wiki/Bcrypt#Versioning_history
// There was a bug 9 years ago in PHP's implementation of 2a, so they decided to call the fixed version 2y.
	// httpd decided to adopt this (if it sees 2a it uses elaborate heuristic workarounds to mitigate the bug,
	// but 2y is assumed not to need them), while everyone else (including Go) simply decided not to implement the bug in 2a.
	// The bug only affects passwords containing characters with the high bit set, i.e. not the ASCII passwords generated here.
	// Anyway, Ironic implemented their own basic auth verification and originally hard-coded 2y because that's what
	// htpasswd produces (see https://review.opendev.org/738718). It is better to keep this, as one day we may move the auth
	// to httpd, and using 2y would avoid triggering the workarounds.
hash[2] = 'y'
secret := &corev1.Secret{
ObjectMeta: metav1.ObjectMeta{
Name: name,
Namespace: targetNamespace,
},
StringData: map[string]string{
ironicUsernameKey: username,
ironicPasswordKey: password,
ironicHtpasswdKey: fmt.Sprintf("%s:%s", username, hash),
ironicConfigKey: fmt.Sprintf(`[%s]
auth_type = http_basic
username = %s
password = %s
`,
configSection, username, password),
},
}
err = controllerutil.SetControllerReference(baremetalConfig, secret, scheme)
if err != nil {
return err
}
_, err = client.Secrets(targetNamespace).Create(context.Background(), secret, metav1.CreateOptions{})
return err
} |
splines.py | #!/usr/bin/env python
#
# Copyright 2019 DFKI GmbH.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
# -*- coding: utf-8 -*-
import numpy as np
import scipy.interpolate as si
import math
from .utils import closestLowerValueBinarySearch
B_SPLINE_DEGREE = 3
class BSplineWrapper(object):
def __init__(self, points, degree=B_SPLINE_DEGREE, domain=None):
self.points = np.array(points)
if isinstance(points[0], (int, float, complex)):
self.dimensions = 1
else:
self.dimensions = len(points[0])
self.degree = degree
if domain is not None:
self.domain = domain
else:
self.domain = (0.0, 1.0)
self.initiated = True
self.spline_def = []
points_t = np.array(points).T
t_func = np.linspace(self.domain[0], self.domain[1], len(points)).tolist()
for d in range(len(points_t)):
#print d, self.dimensions
            self.spline_def.append(si.splrep(t_func, points_t[d], w=None, k=self.degree))  # use the requested degree rather than a hard-coded k=3
def _initiate_control_points(self):
return
def clear(self):
return
def queryPoint(self, u):
"""
"""
point = []
for d in range(self.dimensions):
point.append(si.splev(u, self.spline_def[d]))
return np.array(point)
def get_last_control_point(self):
return self.points[-1]
class BSpline(object):
"""
http://demonstrations.wolfram.com/GeneratingABSplineCurveByTheCoxDeBoorAlgorithm/
http://www.cs.mtu.edu/~shene/COURSES/cs3621/NOTES/spline/B-spline/bspline-basis.html
"""
def __init__(self, points, degree=3, domain=None):
self.points = np.array(points)
if isinstance(points[0], (int, float, complex)):
self.dimensions = 1
else:
self.dimensions = len(points[0])
self.degree = degree
if domain is not None:
self.domain = domain
else:
self.domain = (0.0, 1.0)
self.knots = None
self.initiated = False
self._create_knots()
def _initiate_control_points(self):
return
def clear(self): | return
def get_last_control_point(self):
return self.points[-1]
def _create_knots(self):
"""
http://www.cs.mtu.edu/~shene/COURSES/cs3621/NOTES/spline/B-spline/bspline-curve.html
#To change the shape of a B-spline curve, one can modify one or more of
#these control parameters:
#the positions of control points, the positions of knots, and the degree of the curve.
# given n+1 control points and m+1 knots the following property must be true
#m = n + p + 1. // p+1 = m-n
# for a campled curve the last knot must be of multiplicity p+1
If you have n+1 control points (n=9) and p = 3.
Then, m must be 13 so that the knot vector has 14 knots
The remaining 14 - (4 + 4) = 6 knots can be anywhere in the domain.
U = { 0, 0, 0, 0, 0.14, 0.28, 0.42, 0.57, 0.71, 0.85, 1, 1, 1, 1 }.
how do find the knot points C(ui).
"""
outer_knots = self.degree+1
print("multiplicity", outer_knots)
n = len(self.points) - 1
print("control points", len(self.points))
print("n", n)
p = self.degree
m = n + p + 1
n_knots = m + 1
inner_knots = n_knots-(outer_knots*2 - 2)
print("knots", n_knots)
print("free knots", inner_knots)
print("domain", self.domain)
#print np.linspace(0.0, 1.0, 4)
knots = np.linspace(self.domain[0], self.domain[1], inner_knots).tolist()
#print self.knots
self.knots = knots[:1] * (outer_knots-1) + knots +\
knots[-1:] * (outer_knots-1)
print(self.knots)
print(len(self.knots))
self.initiated = True
def queryPoint(self, u):
"""
"""
return self.evaluate(u, algorithm="deboor")
def evaluate(self, u, algorithm="standard"):
#print "evaluate", u
if self.domain[0] < u < self.domain[1]:
if algorithm == "standard":
                value = 0.0  # np.zeros(self.dimensions); broadcasting turns this into an array on the first +=
n = len(self.points)
w_list = []
for i in range(n):
#i+=self.degree
#print "iteration",i, self.basis(u, i, self.degree)
#i = self.get_begin_of_knot_range(u)
w = self.basis(u, i, self.degree)
w_list.append(w)
#print temp
value += w * self.points[i]
#print sum(w_list)
return value
elif algorithm == "deboor":
i = self.get_begin_of_knot_range(u)
#print u
return self.deboor(self.degree, self.degree, u, i)
elif u >= self.domain[1]:
return self.points[-1]
elif u <= self.domain[0]:
return self.points[0]
def basis(self, u, i, p):
"""http://devosaurus.blogspot.de/2013/10/exploring-b-splines-in-python.html
"""
if p == 0:
if self.knots[i] <= u < self.knots[i+1]:
return 1.0
else:
return 0.0
elif p >= 1:
#print i+p
#print "knot interval", i, i+p, self.knots[i+p]
out = 0.0
w_nom = (u-self.knots[i])
w_denom = (self.knots[i+p]-self.knots[i])
if w_denom > 0.0:
w = w_nom / w_denom
out += w * self.basis(u, i, p-1)
w_inv_nom = (self.knots[i+p+1] - u)
w_inv_denom = (self.knots[i+p+1] - self.knots[i+1])
if w_inv_denom > 0.0:
w_inv = w_inv_nom / w_inv_denom
out += w_inv * self.basis(u, i+1, p-1)
return out
def get_begin_of_knot_range(self, u):
begin_of_range = 0
for i, u_i in enumerate(self.knots):
if u_i < u:
begin_of_range = i
else:
break
#print "begin", begin_of_range
return begin_of_range
def deboor(self, k, p, u, i):
"""
https://chi3x10.wordpress.com/2009/10/18/de-boor-algorithm-in-c/
"""
if k == 0:
return self.points[i]
elif k >= 1:
denom = (self.knots[i+p+1-k] - self.knots[i])
if denom >0:
alpha = (u-self.knots[i])/denom
return (1-alpha) * self.deboor(k-1, p, u, i-1) \
+ (alpha * self.deboor(k-1, p, u, i))
else:
return np.zeros(self.dimensions)
class CatmullRomSpline():
'''
    Spline that passes through its control points, with an arc-length mapping
    used by motion planning. Implemented using the following resources and examples:
#http://www.cs.cmu.edu/~462/projects/assn2/assn2/catmullRom.pdf
#http://algorithmist.net/docs/catmullrom.pdf
#http://www.mvps.org/directx/articles/catmull/
#http://hawkesy.blogspot.de/2010/05/catmull-rom-spline-curve-implementation.html
#http://pages.cpsc.ucalgary.ca/~jungle/587/pdf/5-interpolation.pdf
'''
def __init__(self,controlPoints, dimensions, granularity=100):
self.granularity = granularity
#http://algorithmist.net/docs/catmullrom.pdf
#base matrix to calculate one component of a point on the spline based on the influence of control points
self.catmullRomBaseMatrix = np.array([[-1.0, 3.0, -3.0, 1.0],
[2.0, -5.0, 4.0, -1.0],
[-1.0, 0.0, 1.0, 0.0],
[0.0, 2.0, 0.0, 0.0]])
self.dimensions = dimensions
self.fullArcLength = 0
self.initiated = False
self.controlPoints = []
self.numberOfSegments = 0
if len (controlPoints) >0:
self.initiateControlPoints(controlPoints)
self.initiated = True
def initiateControlPoints(self,controlPoints):
'''
@param controlPoints array of class accessible by controlPoints[index][dimension]
'''
self.numberOfSegments = len(controlPoints)-1
self.controlPoints = [controlPoints[0]]+controlPoints+[controlPoints[-1],controlPoints[-1]]#as a workaround add multiple points at the end instead of one
print("length of control point list ",len(self.controlPoints))
print("number of segments ",self.numberOfSegments)
print("number of dimensions",self.dimensions)
self.updateArcLengthMappingTable()
return
def addPoint(self,point):
        # add the point, replacing the auxiliary control points at the end
if self.initiated:
del self.controlPoints[-2:]
            self.numberOfSegments = len(self.controlPoints)-1  # equals the new segment count once the point is appended below
self.controlPoints += [point,point,point]
print(self.controlPoints)
#update arc length mapping
self.updateArcLengthMappingTable()
else:
self.initiateControlPoints([point,])
self.initiated = True
def clear(self):
self.controlPoints = []
self.initiated = False
self.fullArcLength = 0
self.numberOfSegments = 0
self.arcLengthMap = []
def transformByMatrix(self,matrix):
'''
        matrix: an n x n transformation matrix, where n is the number of dimensions of the Catmull-Rom spline
'''
if self.dimensions < matrix.shape[0]:
for i in range(len(self.controlPoints)):
self.controlPoints[i] = np.dot(matrix, self.controlPoints[i])
else:
print("failed",matrix.shape)
return
def updateArcLengthMappingTable(self):
'''
creates a table that maps from parameter space of query point to relative arc length based on the given granularity in the constructor of the catmull rom spline
http://pages.cpsc.ucalgary.ca/~jungle/587/pdf/5-interpolation.pdf
'''
self.fullArcLength = 0
granularity = self.granularity
u = np.arange(granularity+1) / float(granularity)
lastPoint = None
        numberOfEvaluations = 0
self.arcLengthMap = []
for i in u:
point = self.queryPoint(i)
if lastPoint is not None:
delta = []
d = 0
while d < self.dimensions:
delta.append(math.sqrt((point[d]-lastPoint[d])**2))
d += 1
self.fullArcLength += np.sum(delta)#(point-lastPoint).length()
#print self.fullArcLength
self.arcLengthMap.append([i,self.fullArcLength])
            numberOfEvaluations += 1
            lastPoint = point
# self.fullArcLength = arcLength
#normalize values
        if self.fullArcLength > 0:
            for i in range(numberOfEvaluations):
self.arcLengthMap[i][1] /= self.fullArcLength
def getFullArcLength(self, granularity = 100):
#granularity = self.granularity
u = np.arange(granularity+1) / float(granularity)
arcLength = 0.0
lastPoint = None
for i in u:
print("sample",i)
point = self.queryPoint(i)
            if lastPoint is not None:
                arcLength += np.linalg.norm(point-lastPoint)  # (point-lastPoint).length()
            lastPoint = point
print(point)
return arcLength
def getDistanceToPath(self,absoluteArcLength, position):
'''
evaluates a point with absoluteArcLength on self to get a point on the path
then the distance between the given position and the point on the path is returned
'''
pointOnPath = self.getPointAtAbsoluteArcLength(absoluteArcLength)
return np.linalg.norm(position-pointOnPath)
def getLastControlPoint(self):
if len(self.controlPoints)> 0:
return self.controlPoints[-1]
else:
return [0,0,0]
def getArcLengthForParameter(self,t):
stepSize = 1/self.granularity
tableIndex = int(t/stepSize)
return self.arcLengthMap[tableIndex][1]*self.fullArcLength
def getPointAtAbsoluteArcLength(self,absoluteArcLength):
        point = np.zeros((1, self.dimensions))  # source of bug
if absoluteArcLength <= self.fullArcLength:
# parameterize curve by arc length
relativeArcLength = absoluteArcLength/self.fullArcLength
point = self.queryPointByRelativeArcLength(relativeArcLength)
else:
return None
# else:
# raise ValueError('%f exceeded arc length %f' % (absoluteArcLength,self.fullArcLength))
return point
def findClosestValuesInArcLengthMap(self,relativeArcLength):
'''
- given a relative arc length between 0 and 1 it uses closestLowerValueBinarySearch from the Generic Algorithms module to search the self.arcLengthMap for the values bounding the searched value
- returns floor parameter, ceiling parameter, floor arc length, ceiling arc length and a bool if the exact value was found
'''
foundExactValue = True
result = closestLowerValueBinarySearch(self.arcLengthMap,0,len(self.arcLengthMap)-1,relativeArcLength, getter = lambda A,i: A[i][1])#returns the index and a flag value, requires a getter for the array
index = result[0]
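        # result[1] encodes the outcome of the binary search: 0 = exact match,
        # 1 = closest lower value found, 2 = below the smallest entry,
        # 3 = above the largest entry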
        if result[1] == 0:  # found exact value
floorP, ceilP = self.arcLengthMap[index][0],self.arcLengthMap[index][0]
floorL, ceilL = self.arcLengthMap[index][1],self.arcLengthMap[index][1]
foundExactValue = True
        elif result[1] == 1:  # found lower value
floorP = self.arcLengthMap[index][0]
floorL = self.arcLengthMap[index][1]
if index <len(self.arcLengthMap):#check array bounds
ceilP = self.arcLengthMap[index+1][0]
ceilL = self.arcLengthMap[index+1][1]
foundExactValue = False
else:
foundExactValue = True
ceilP= floorP
ceilL = floorL
        elif result[1] == 2:  # value smaller than smallest element in the array
ceilP = self.arcLengthMap[index][0]
floorL = self.arcLengthMap[index][1]
floorP = ceilP
ceilL = floorL
foundExactValue = True
        elif result[1] == 3:  # value larger than largest element in the array
ceilP = self.arcLengthMap[index][0]
ceilL = self.arcLengthMap[index][1]
floorP = ceilP
floorL = ceilL
foundExactValue = True
#print relativeArcLength,floorL,ceilL,foundExactValue
return floorP,ceilP,floorL,ceilL,foundExactValue
#see slide 30 of http://pages.cpsc.ucalgary.ca/~jungle/587/pdf/5-interpolation.pdf
#note it does a binary search so it is rather expensive to be called at every frame
def queryPointByRelativeArcLength(self,relativeArcLength):
floorP,ceilP,floorL,ceilL,foundExactValue = self.findClosestValuesInArcLengthMap(relativeArcLength)
if not foundExactValue:
            alpha = (relativeArcLength-floorL)/(ceilL-floorL)  # can be reused
#t = floorL+alpha*(ceilL-floorL)
t = floorP+alpha*(ceilP-floorP)
else:
t = floorP
#t = relativeArcLength#todo add correct mapping
return self.queryPoint(t)
def mapToSegment(self,t):
        i = min(math.floor(self.numberOfSegments * t), self.numberOfSegments)  # index of the segment containing t
        localT = (self.numberOfSegments * t) - math.floor(self.numberOfSegments * t)  # the remainder, e.g. N = 10 and t = 0.62 => i = 6 and the remainder is 0.2
#i = min(i,self.numberOfSegments)
return i+1,localT#increment i by 1 to ignore the first auxiliary control point
def getControlPointVectors(self,i):
i = int(i)
#if i<=self.numberOfSegments-2:
d = 0
vectors = []
while d < self.dimensions:
v = [float(self.controlPoints[i-1][d]),float(self.controlPoints[i][d]),float(self.controlPoints[i+1][d]),float(self.controlPoints[i+2][d])]
vectors.append(np.array(v))
d+=1
return vectors
#
def queryPoint(self, t):
i,localT = self.mapToSegment(t)
weightVector = np.array([localT**3,localT**2,localT,1])
controlPointVectors = self.getControlPointVectors(i)
point =[]
d =0
while d < self.dimensions:
point.append(self.queryValue(weightVector, controlPointVectors[d]))
d += 1
return np.array(point)
def queryValue(self, weightVector, controllPointVector):
v = np.dot(self.catmullRomBaseMatrix, controllPointVector)
v = np.dot(weightVector, v)
return 0.5 * v | |
calcfunctions.py | """
Tax-Calculator functions that calculate payroll and individual income taxes.
These functions are imported into the Calculator class.
Note: the parameter_indexing_CPI_offset policy parameter is the only
policy parameter that does not appear here; it is used in the policy.py
file to possibly adjust the price inflation rate used to index policy
parameters (as would be done in a reform that introduces chained-CPI
indexing).
"""
# CODING-STYLE CHECKS:
# pycodestyle calcfunctions.py
# pylint --disable=locally-disabled calcfunctions.py
#
# pylint: disable=too-many-lines
# pylint: disable=invalid-name
# pylint: disable=too-many-arguments
# pylint: disable=too-many-locals
import math
import copy
import numpy as np
from taxcalc.decorators import iterate_jit, JIT
def BenefitPrograms(calc):
"""
Calculate total government cost and consumption value of benefits
delivered by non-repealed benefit programs.
"""
# zero out benefits delivered by repealed programs
zero = np.zeros(calc.array_len)
if calc.policy_param('BEN_housing_repeal'):
calc.array('housing_ben', zero)
if calc.policy_param('BEN_ssi_repeal'):
calc.array('ssi_ben', zero)
if calc.policy_param('BEN_snap_repeal'):
calc.array('snap_ben', zero)
if calc.policy_param('BEN_tanf_repeal'):
calc.array('tanf_ben', zero)
if calc.policy_param('BEN_vet_repeal'):
calc.array('vet_ben', zero)
if calc.policy_param('BEN_wic_repeal'):
calc.array('wic_ben', zero)
if calc.policy_param('BEN_mcare_repeal'):
calc.array('mcare_ben', zero)
if calc.policy_param('BEN_mcaid_repeal'):
calc.array('mcaid_ben', zero)
if calc.policy_param('BEN_oasdi_repeal'):
calc.array('e02400', zero)
if calc.policy_param('BEN_ui_repeal'):
calc.array('e02300', zero)
if calc.policy_param('BEN_other_repeal'):
calc.array('other_ben', zero)
# calculate government cost of all benefits
cost = np.array(
calc.array('housing_ben') +
calc.array('ssi_ben') +
calc.array('snap_ben') +
calc.array('tanf_ben') +
calc.array('vet_ben') +
calc.array('wic_ben') +
calc.array('mcare_ben') +
calc.array('mcaid_ben') +
calc.array('e02400') +
calc.array('e02300') +
calc.array('ubi') +
calc.array('other_ben')
)
calc.array('benefit_cost_total', cost)
# calculate consumption value of all benefits
# (assuming that cash benefits have full value)
value = np.array(
calc.array('housing_ben') * calc.consump_param('BEN_housing_value') +
calc.array('ssi_ben') +
calc.array('snap_ben') * calc.consump_param('BEN_snap_value') +
calc.array('tanf_ben') * calc.consump_param('BEN_tanf_value') +
calc.array('vet_ben') * calc.consump_param('BEN_vet_value') +
calc.array('wic_ben') * calc.consump_param('BEN_wic_value') +
calc.array('mcare_ben') * calc.consump_param('BEN_mcare_value') +
calc.array('mcaid_ben') * calc.consump_param('BEN_mcaid_value') +
calc.array('e02400') +
calc.array('e02300') +
calc.array('ubi') +
calc.array('other_ben') * calc.consump_param('BEN_other_value')
)
calc.array('benefit_value_total', value)
@iterate_jit(nopython=True)
def EI_PayrollTax(SS_Earnings_c, e00200p, e00200s, pencon_p, pencon_s,
FICA_ss_trt, FICA_mc_trt, ALD_SelfEmploymentTax_hc,
SS_Earnings_thd, e00900p, e00900s, e02100p, e02100s, k1bx14p,
k1bx14s, payrolltax, ptax_was, setax, c03260, ptax_oasdi,
sey, earned, earned_p, earned_s,
was_plus_sey_p, was_plus_sey_s):
"""
Compute part of total OASDI+HI payroll taxes and earned income variables.
"""
# compute sey and its individual components
sey_p = e00900p + e02100p + k1bx14p
sey_s = e00900s + e02100s + k1bx14s
sey = sey_p + sey_s # total self-employment income for filing unit
# compute gross wage and salary income ('was' denotes 'wage and salary')
gross_was_p = e00200p + pencon_p
gross_was_s = e00200s + pencon_s
# compute taxable gross earnings for OASDI FICA
txearn_was_p = min(SS_Earnings_c, gross_was_p)
txearn_was_s = min(SS_Earnings_c, gross_was_s)
# compute OASDI and HI payroll taxes on wage-and-salary income, FICA
ptax_ss_was_p = FICA_ss_trt * txearn_was_p
ptax_ss_was_s = FICA_ss_trt * txearn_was_s
ptax_mc_was_p = FICA_mc_trt * gross_was_p
ptax_mc_was_s = FICA_mc_trt * gross_was_s
ptax_was = ptax_ss_was_p + ptax_ss_was_s + ptax_mc_was_p + ptax_mc_was_s
# compute taxable self-employment income for OASDI SECA
sey_frac = 1.0 - 0.5 * (FICA_ss_trt + FICA_mc_trt)
txearn_sey_p = min(max(0., sey_p * sey_frac), SS_Earnings_c - txearn_was_p)
txearn_sey_s = min(max(0., sey_s * sey_frac), SS_Earnings_c - txearn_was_s)
# compute self-employment tax on taxable self-employment income, SECA
setax_ss_p = FICA_ss_trt * txearn_sey_p
setax_ss_s = FICA_ss_trt * txearn_sey_s
setax_mc_p = FICA_mc_trt * max(0., sey_p * sey_frac)
setax_mc_s = FICA_mc_trt * max(0., sey_s * sey_frac)
setax_p = setax_ss_p + setax_mc_p
setax_s = setax_ss_s + setax_mc_s
setax = setax_p + setax_s
# compute extra OASDI payroll taxes on the portion of the sum
# of wage-and-salary income and taxable self employment income
# that exceeds SS_Earnings_thd
sey_frac = 1.0 - 0.5 * FICA_ss_trt
was_plus_sey_p = gross_was_p + max(0., sey_p * sey_frac)
was_plus_sey_s = gross_was_s + max(0., sey_s * sey_frac)
extra_ss_income_p = max(0., was_plus_sey_p - SS_Earnings_thd)
extra_ss_income_s = max(0., was_plus_sey_s - SS_Earnings_thd)
extra_payrolltax = (extra_ss_income_p * FICA_ss_trt +
extra_ss_income_s * FICA_ss_trt)
# compute part of total payroll taxes for filing unit
# (the ptax_amc part of total payroll taxes for the filing unit is
# computed in the AdditionalMedicareTax function below)
payrolltax = ptax_was + setax + extra_payrolltax
# compute OASDI part of payroll taxes
ptax_oasdi = (ptax_ss_was_p + ptax_ss_was_s +
setax_ss_p + setax_ss_s +
extra_payrolltax)
# compute earned* variables and AGI deduction for
# "employer share" of self-employment tax, c03260
# Note: c03260 is the amount on 2015 Form 1040, line 27
c03260 = (1. - ALD_SelfEmploymentTax_hc) * 0.5 * setax
earned = max(0., e00200p + e00200s + sey - c03260)
earned_p = max(0., (e00200p + sey_p -
(1. - ALD_SelfEmploymentTax_hc) * 0.5 * setax_p))
earned_s = max(0., (e00200s + sey_s -
(1. - ALD_SelfEmploymentTax_hc) * 0.5 * setax_s))
return (sey, payrolltax, ptax_was, setax, c03260, ptax_oasdi,
earned, earned_p, earned_s, was_plus_sey_p, was_plus_sey_s)
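# Worked example for the SECA factor used in EI_PayrollTax (the rates are
# hypothetical stand-ins for current-law values, not read from policy
# data): with FICA_ss_trt = 0.124 and FICA_mc_trt = 0.029,
#   sey_frac = 1 - 0.5 * (0.124 + 0.029) = 0.9235,
# so a filer with sey_p = 50000, no wages, and SS_Earnings_c above 46175
# has txearn_sey_p = 0.9235 * 50000 = 46175 and pays
#   setax_ss_p = 0.124 * 46175 = 5725.70
#   setax_mc_p = 0.029 * 46175 = 1339.08 (rounded)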
@iterate_jit(nopython=True)
def DependentCare(nu13, elderly_dependents, earned,
MARS, ALD_Dependents_thd, ALD_Dependents_hc,
ALD_Dependents_Child_c, ALD_Dependents_Elder_c,
care_deduction):
"""
Computes dependent-care above-the-line deduction.
Parameters
----------
nu13: Number of dependents under 13 years old
elderly_dependents: number of elderly dependents
earned: Form 2441 earned income amount
MARS: Marital Status
ALD_Dependents_thd: Maximum income to qualify for deduction
ALD_Dependents_hc: Deduction for dependent care haircut
ALD_Dependents_Child_c: National weighted average cost of childcare
ALD_Dependents_Elder_c: Eldercare deduction ceiling
Returns
-------
care_deduction: Total above the line deductions for dependent care.
"""
if earned <= ALD_Dependents_thd[MARS - 1]:
care_deduction = (((1. - ALD_Dependents_hc) * nu13 *
ALD_Dependents_Child_c) +
((1. - ALD_Dependents_hc) * elderly_dependents *
ALD_Dependents_Elder_c))
else:
care_deduction = 0.
return care_deduction
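# Worked example for DependentCare (hypothetical parameter values, not
# read from policy data): with ALD_Dependents_Child_c = 5000,
# ALD_Dependents_Elder_c = 1000, ALD_Dependents_hc = 0, nu13 = 2, and
# elderly_dependents = 1, a unit with earned income at or below
# ALD_Dependents_thd deducts 2 * 5000 + 1 * 1000 = 11000; above the
# threshold the deduction is zero.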
@iterate_jit(nopython=True)
def Adj(e03150, e03210, c03260,
e03270, e03300, e03400, e03500, e00800,
e03220, e03230, e03240, e03290, care_deduction,
ALD_StudentLoan_hc, ALD_SelfEmp_HealthIns_hc, ALD_KEOGH_SEP_hc,
ALD_EarlyWithdraw_hc, ALD_AlimonyPaid_hc, ALD_AlimonyReceived_hc,
ALD_EducatorExpenses_hc, ALD_HSADeduction_hc, ALD_IRAContributions_hc,
ALD_DomesticProduction_hc, ALD_Tuition_hc,
c02900):
"""
Adj calculates Form 1040 AGI adjustments (i.e., Above-the-Line Deductions).
Notes
-----
Taxpayer characteristics:
e03210 : Student loan interest paid
e03220 : Educator expenses
e03150 : Total deductible IRA plan contributions
e03230 : Tuition and fees (Form 8917)
e03240 : Domestic production activity deduction (Form 8903)
c03260 : Self-employment tax deduction (after haircut)
e03270 : Self-employed health insurance premiums
e03290 : HSA deduction (Form 8889)
e03300 : Total deductible KEOGH/SEP/SIMPLE/etc. plan contributions
e03400 : Penalty on early withdrawal of savings deduction
e03500 : Alimony paid
e00800 : Alimony received
care_deduction : Dependent care expense deduction
Tax law parameters:
ALD_StudentLoan_hc : Student loan interest deduction haircut
ALD_SelfEmp_HealthIns_hc : Self-employed h.i. deduction haircut
ALD_KEOGH_SEP_hc : KEOGH/etc. plan contribution deduction haircut
        ALD_EarlyWithdraw_hc : Penalty on early withdrawal deduction haircut
ALD_AlimonyPaid_hc : Alimony paid deduction haircut
ALD_AlimonyReceived_hc : Alimony received deduction haircut
        ALD_EducatorExpenses_hc: Educator expenses haircut
ALD_HSADeduction_hc: HSA Deduction haircut
ALD_IRAContributions_hc: IRA Contribution haircut
ALD_DomesticProduction_hc: Domestic production haircut
ALD_Tuition_hc: Tuition and fees haircut
Returns
-------
c02900 : total Form 1040 adjustments, which are not included in AGI
"""
# Form 2555 foreign earned income exclusion is assumed to be zero
# Form 1040 adjustments that are included in expanded income:
c02900 = ((1. - ALD_StudentLoan_hc) * e03210 +
c03260 +
(1. - ALD_EarlyWithdraw_hc) * e03400 +
(1. - ALD_AlimonyPaid_hc) * e03500 +
(1. - ALD_AlimonyReceived_hc) * e00800 +
(1. - ALD_EducatorExpenses_hc) * e03220 +
(1. - ALD_Tuition_hc) * e03230 +
(1. - ALD_DomesticProduction_hc) * e03240 +
(1. - ALD_HSADeduction_hc) * e03290 +
(1. - ALD_SelfEmp_HealthIns_hc) * e03270 +
(1. - ALD_IRAContributions_hc) * e03150 +
(1. - ALD_KEOGH_SEP_hc) * e03300 +
care_deduction)
return c02900
@iterate_jit(nopython=True)
def ALD_InvInc_ec_base(p22250, p23250, sep,
e00300, e00600, e01100, e01200,
invinc_ec_base):
"""
Computes invinc_ec_base.
"""
# limitation on net short-term and long-term capital losses
cgain = max((-3000. / sep), p22250 + p23250)
# compute exclusion of investment income from AGI
invinc_ec_base = e00300 + e00600 + cgain + e01100 + e01200
return invinc_ec_base
@iterate_jit(nopython=True)
def CapGains(p23250, p22250, sep, ALD_StudentLoan_hc,
ALD_InvInc_ec_rt, invinc_ec_base,
e00200, e00300, e00600, e00650, e00700, e00800,
CG_nodiff, CG_ec, CG_reinvest_ec_rt,
ALD_BusinessLosses_c, MARS,
e00900, e01100, e01200, e01400, e01700, e02000, e02100,
e02300, e00400, e02400, c02900, e03210, e03230, e03240,
c01000, c23650, ymod, ymod1, invinc_agi_ec,
gains_at_death, CG_death, CG_death_ec):
"""
    Computes limited capital gains (c01000), the investment income
    exclusion from AGI, the ymod1 income amount included in AGI, and the
    ymod income measure used in the taxation of OASDI benefits.
"""
# compute taxable portion of capital gains at death (gains_at_death - CG_death_ec)
    if CG_death:
taxable_gains_at_death = max(0., gains_at_death - CG_death_ec[MARS-1])
else:
taxable_gains_at_death = 0.
# net capital gain (long term + short term + gains at death) before exclusion
c23650 = p23250 + p22250 + taxable_gains_at_death
# limitation on capital losses
c01000 = max((-3000. / sep), c23650)
# compute total investment income
invinc = e00300 + e00600 + c01000 + e01100 + e01200
# compute exclusion of investment income from AGI
invinc_agi_ec = ALD_InvInc_ec_rt * max(0., invinc_ec_base)
# compute ymod1 variable that is included in AGI
ymod1 = (e00200 + e00700 + e00800 + e01400 + e01700 +
invinc - invinc_agi_ec + e02100 + e02300 +
max(e00900 + e02000, -ALD_BusinessLosses_c[MARS - 1]))
if CG_nodiff:
# apply QDIV+CG exclusion if QDIV+LTCG receive no special tax treatment
qdcg_pos = max(0., e00650 + c01000)
qdcg_exclusion = (min(CG_ec, qdcg_pos) +
CG_reinvest_ec_rt * max(0., qdcg_pos - CG_ec))
ymod1 = max(0., ymod1 - qdcg_exclusion)
invinc_agi_ec += qdcg_exclusion
# compute ymod variable that is used in OASDI benefit taxation logic
ymod2 = e00400 + (0.50 * e02400) - c02900
ymod3 = (1. - ALD_StudentLoan_hc) * e03210 + e03230 + e03240
ymod = ymod1 + ymod2 + ymod3
return (c01000, c23650, ymod, ymod1, invinc_agi_ec,
gains_at_death, taxable_gains_at_death)
@iterate_jit(nopython=True)
def SSBenefits(MARS, ymod, e02400, SS_thd50, SS_thd85,
SS_percentage1, SS_percentage2, c02500):
"""
Calculates OASDI benefits included in AGI, c02500.
"""
if ymod < SS_thd50[MARS - 1]:
c02500 = 0.
elif ymod < SS_thd85[MARS - 1]:
c02500 = SS_percentage1 * min(ymod - SS_thd50[MARS - 1], e02400)
else:
c02500 = min(SS_percentage2 * (ymod - SS_thd85[MARS - 1]) +
SS_percentage1 *
min(e02400, SS_thd85[MARS - 1] -
SS_thd50[MARS - 1]), SS_percentage2 * e02400)
return c02500
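# Worked example for SSBenefits (hypothetical parameter values, not read
# from policy data): for MARS == 2 with SS_thd50 = 32000,
# SS_thd85 = 44000, SS_percentage1 = 0.5, and SS_percentage2 = 0.85, a
# unit with ymod = 40000 and e02400 = 20000 gets
#   c02500 = 0.5 * min(40000 - 32000, 20000) = 4000,
# while the same unit with ymod = 50000 gets
#   c02500 = min(0.85 * (50000 - 44000) + 0.5 * min(20000, 12000),
#                0.85 * 20000) = min(11100, 17000) = 11100.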
@iterate_jit(nopython=True)
def UBI(nu18, n1820, n21, UBI_u18, UBI_1820, UBI_21, UBI_ecrt,
ubi, taxable_ubi, nontaxable_ubi):
"""
Calculates total and taxable Universal Basic Income (UBI) amount.
Parameters
----------
nu18: Number of people in the tax unit under 18
n1820: Number of people in the tax unit age 18-20
n21: Number of people in the tax unit age 21+
UBI_u18: UBI benefit for those under 18
UBI_1820: UBI benefit for those between 18 to 20
UBI_21: UBI benefit for those 21 or more
UBI_ecrt: Fraction of UBI benefits that are not included in AGI
Returns
-------
ubi: total UBI received by the tax unit (is included in expanded_income)
taxable_ubi: amount of UBI that is taxable (is added to AGI)
nontaxable_ubi: amount of UBI that is nontaxable
"""
ubi = nu18 * UBI_u18 + n1820 * UBI_1820 + n21 * UBI_21
taxable_ubi = ubi * (1. - UBI_ecrt)
nontaxable_ubi = ubi - taxable_ubi
return ubi, taxable_ubi, nontaxable_ubi
@iterate_jit(nopython=True)
def AGI(ymod1, c02500, c02900, XTOT, MARS, sep, DSI, exact, nu18, taxable_ubi,
II_em, II_em_ps, II_prt, II_no_em_nu18,
c00100, pre_c04600, c04600):
"""
Computes Adjusted Gross Income (AGI), c00100, and
compute personal exemption amount, c04600.
"""
# calculate AGI assuming no foreign earned income exclusion
c00100 = ymod1 + c02500 - c02900 + taxable_ubi
# calculate personal exemption amount
if II_no_em_nu18: # repeal of personal exemptions for deps. under 18
pre_c04600 = max(0, XTOT - nu18) * II_em
else:
pre_c04600 = XTOT * II_em
if DSI:
pre_c04600 = 0.
# phase-out personal exemption amount
if exact == 1: # exact calculation as on tax forms
line5 = max(0., c00100 - II_em_ps[MARS - 1])
line6 = math.ceil(line5 / (2500. / sep))
line7 = II_prt * line6
c04600 = max(0., pre_c04600 * (1. - line7))
else: # smoothed calculation needed for sensible mtr calculation
dispc_numer = II_prt * (c00100 - II_em_ps[MARS - 1])
dispc_denom = 2500. / sep
dispc = min(1., max(0., dispc_numer / dispc_denom))
c04600 = pre_c04600 * (1. - dispc)
return (c00100, pre_c04600, c04600)
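# Note on the exact/smoothed branch in AGI: the tax-form calculation uses
# math.ceil, which makes c04600 a step function of AGI and yields
# ill-defined marginal tax rates at the step points; the smoothed branch
# replaces the staircase with a linear phase-out.  Illustration with
# hypothetical values II_prt = 0.02, II_em_ps = 0, sep = 1, and
# c00100 = 6000:
#   exact:  line6 = ceil(6000 / 2500) = 3, so line7 = 0.06
#   smooth: dispc = min(1., 0.02 * 6000 / 2500) = 0.048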
@iterate_jit(nopython=True)
def ItemDedCap(e17500, e18400, e18500, e19200, e19800, e20100, e20400, g20500,
c00100, ID_AmountCap_rt, ID_AmountCap_Switch, e17500_capped,
e18400_capped, e18500_capped, e19200_capped, e19800_capped,
e20100_capped, e20400_capped, g20500_capped):
"""
Applies a cap to gross itemized deductions.
Notes
-----
Tax Law Parameters:
ID_AmountCap_Switch : Indicator for which itemized deductions are
capped
ID_AmountCap_rt : Cap on itemized deductions; decimal fraction of AGI
Taxpayer Characteristics:
e17500 : Medical expenses
e18400 : State and local taxes
e18500 : Real-estate taxes
e19200 : Interest paid
e19800 : Charity cash contributions
e20100 : Charity noncash contributions
e20400 : Total miscellaneous expenses
g20500 : Gross casualty or theft loss (before disregard)
c00100: Adjusted Gross Income
Returns
-------
e17500_capped: Medical expenses, capped by ItemDedCap
e18400_capped: State and local taxes, capped by ItemDedCap
e18500_capped : Real-estate taxes, capped by ItemDedCap
e19200_capped : Interest paid, capped by ItemDedCap
e19800_capped : Charity cash contributions, capped by ItemDedCap
e20100_capped : Charity noncash contributions, capped by ItemDedCap
e20400_capped : Total miscellaneous expenses, capped by ItemDedCap
g20500_capped : Gross casualty or theft loss (before disregard),
capped by ItemDedCap
"""
# pylint: disable=too-many-branches
cap = max(0., ID_AmountCap_rt * c00100)
gross_ded_amt = 0
if ID_AmountCap_Switch[0]: # medical
gross_ded_amt += e17500
if ID_AmountCap_Switch[1]: # statelocal
gross_ded_amt += e18400
if ID_AmountCap_Switch[2]: # realestate
gross_ded_amt += e18500
if ID_AmountCap_Switch[3]: # casualty
gross_ded_amt += g20500
if ID_AmountCap_Switch[4]: # misc
gross_ded_amt += e20400
if ID_AmountCap_Switch[5]: # interest
gross_ded_amt += e19200
if ID_AmountCap_Switch[6]: # charity
gross_ded_amt += e19800 + e20100
overage = max(0., gross_ded_amt - cap)
e17500_capped = e17500
e18400_capped = e18400
e18500_capped = e18500
g20500_capped = g20500
e20400_capped = e20400
e19200_capped = e19200
e19800_capped = e19800
e20100_capped = e20100
if overage > 0. and c00100 > 0.:
if ID_AmountCap_Switch[0]: # medical
e17500_capped -= (e17500 / gross_ded_amt) * overage
if ID_AmountCap_Switch[1]: # statelocal
            e18400_capped -= (e18400 / gross_ded_amt) * overage
if ID_AmountCap_Switch[2]: # realestate
e18500_capped -= (e18500 / gross_ded_amt) * overage
if ID_AmountCap_Switch[3]: # casualty
g20500_capped -= (g20500 / gross_ded_amt) * overage
if ID_AmountCap_Switch[4]: # misc
e20400_capped -= (e20400 / gross_ded_amt) * overage
if ID_AmountCap_Switch[5]: # interest
e19200_capped -= (e19200 / gross_ded_amt) * overage
if ID_AmountCap_Switch[6]: # charity
e19800_capped -= (e19800 / gross_ded_amt) * overage
e20100_capped -= (e20100 / gross_ded_amt) * overage
return (e17500_capped, e18400_capped, e18500_capped, g20500_capped,
e20400_capped, e19200_capped, e19800_capped, e20100_capped)
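# Worked example of the pro-rata overage allocation in ItemDedCap
# (hypothetical values, not read from policy data): with
# ID_AmountCap_rt = 0.5 and c00100 = 100000, the cap is 50000; if the
# switched-on deductions sum to gross_ded_amt = 60000, then
# overage = 10000 and an item such as e18400 = 30000 is reduced by
# (30000 / 60000) * 10000 = 5000.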
@iterate_jit(nopython=True)
def ItemDed(e17500_capped, e18400_capped, e18500_capped, e19200_capped,
e19800_capped, e20100_capped, e20400_capped, g20500_capped,
MARS, age_head, age_spouse, c00100, c04470, c21040, c21060,
c17000, c18300, c19200, c19700, c20500, c20800,
ID_ps, ID_Medical_frt, ID_Medical_frt_add4aged, ID_Medical_hc,
ID_Casualty_frt, ID_Casualty_hc, ID_Miscellaneous_frt,
ID_Miscellaneous_hc, ID_Charity_crt_all, ID_Charity_crt_noncash,
ID_prt, ID_crt, ID_c, ID_StateLocalTax_hc, ID_Charity_frt,
ID_Charity_hc, ID_InterestPaid_hc, ID_RealEstate_hc,
ID_Medical_c, ID_StateLocalTax_c, ID_RealEstate_c,
ID_InterestPaid_c, ID_Charity_c, ID_Casualty_c,
ID_Miscellaneous_c, ID_AllTaxes_c, ID_AllTaxes_hc,
ID_StateLocalTax_crt, ID_RealEstate_crt, ID_Charity_f):
"""
Calculates itemized deductions, Form 1040, Schedule A.
Notes
-----
Tax Law Parameters:
ID_ps : Itemized deduction phaseout AGI start (Pease)
ID_crt : Itemized deduction maximum phaseout
as a decimal fraction of total itemized deduction (Pease)
ID_prt : Itemized deduction phaseout rate (Pease)
ID_c: Dollar limit on itemized deductions
ID_Medical_frt : Deduction for medical expenses;
floor as a decimal fraction of AGI
ID_Medical_frt_add4aged : Addon for medical expenses deduction for
elderly; addon as a decimal fraction of AGI
ID_Casualty_frt : Deduction for casualty loss;
floor as a decimal fraction of AGI
ID_Miscellaneous_frt : Deduction for miscellaneous expenses;
floor as a decimal fraction of AGI
ID_Charity_crt_all : Deduction for all charitable contributions;
ceiling as a decimal fraction of AGI
ID_Charity_crt_noncash : Deduction for noncash charitable
contributions; ceiling as a decimal
fraction of AGI
ID_Charity_frt : Disregard for charitable contributions;
floor as a decimal fraction of AGI
ID_Medical_c : Ceiling on medical expense deduction
ID_StateLocalTax_c : Ceiling on state and local tax deduction
ID_RealEstate_c : Ceiling on real estate tax deduction
ID_AllTaxes_c: Ceiling combined state and local income/sales and
real estate tax deductions
ID_InterestPaid_c : Ceiling on interest paid deduction
ID_Charity_c : Ceiling on charity expense deduction
ID_Charity_f: Floor on charity expense deduction
        ID_Casualty_c : Ceiling on casualty expense deduction
ID_Miscellaneous_c : Ceiling on miscellaneous expense deduction
ID_StateLocalTax_crt : Deduction for state and local taxes;
ceiling as a decimal fraction of AGI
ID_RealEstate_crt : Deduction for real estate taxes;
ceiling as a decimal fraction of AGI
Taxpayer Characteristics:
e17500_capped : Medical expenses, capped by ItemDedCap
e18400_capped : State and local taxes, capped by ItemDedCap
e18500_capped : Real-estate taxes, capped by ItemDedCap
e19200_capped : Interest paid, capped by ItemDedCap
e19800_capped : Charity cash contributions, capped by ItemDedCap
e20100_capped : Charity noncash contributions, capped by ItemDedCap
e20400_capped : Total miscellaneous expenses, capped by ItemDedCap
g20500_capped : Gross casualty or theft loss (before disregard),
capped by ItemDedCap
Returns
-------
c04470 : total itemized deduction amount (and other intermediate variables)
"""
posagi = max(c00100, 0.)
# Medical
medical_frt = ID_Medical_frt
if age_head >= 65 or (MARS == 2 and age_spouse >= 65):
medical_frt += ID_Medical_frt_add4aged
c17750 = medical_frt * posagi
c17000 = max(0., e17500_capped - c17750) * (1. - ID_Medical_hc)
c17000 = min(c17000, ID_Medical_c[MARS - 1])
# State and local taxes
c18400 = min((1. - ID_StateLocalTax_hc) * max(e18400_capped, 0.),
ID_StateLocalTax_c[MARS - 1])
c18500 = min((1. - ID_RealEstate_hc) * e18500_capped,
ID_RealEstate_c[MARS - 1])
# following two statements implement a cap on c18400 and c18500 in a way
# that those with negative AGI, c00100, are not capped under current law,
# hence the 0.0001 rather than zero
c18400 = min(c18400, ID_StateLocalTax_crt * max(c00100, 0.0001))
c18500 = min(c18500, ID_RealEstate_crt * max(c00100, 0.0001))
c18300 = (c18400 + c18500) * (1. - ID_AllTaxes_hc)
c18300 = min(c18300, ID_AllTaxes_c[MARS - 1])
# Interest paid
c19200 = e19200_capped * (1. - ID_InterestPaid_hc)
c19200 = min(c19200, ID_InterestPaid_c[MARS - 1])
# Charity
lim30 = min(ID_Charity_crt_noncash * posagi, e20100_capped)
c19700 = min(ID_Charity_crt_all * posagi, lim30 + e19800_capped)
# charity floor is zero in present law
charity_floor = max(ID_Charity_frt * posagi, ID_Charity_f[MARS - 1])
c19700 = max(0., c19700 - charity_floor) * (1. - ID_Charity_hc)
c19700 = min(c19700, ID_Charity_c[MARS - 1])
# Casualty
c20500 = (max(0., g20500_capped - ID_Casualty_frt * posagi) *
(1. - ID_Casualty_hc))
c20500 = min(c20500, ID_Casualty_c[MARS - 1])
# Miscellaneous
c20400 = e20400_capped
c20750 = ID_Miscellaneous_frt * posagi
c20800 = max(0., c20400 - c20750) * (1. - ID_Miscellaneous_hc)
c20800 = min(c20800, ID_Miscellaneous_c[MARS - 1])
# Gross total itemized deductions
c21060 = c17000 + c18300 + c19200 + c19700 + c20500 + c20800
# Limitations on total itemized deductions
# (no attempt to adjust c04470 components for limitations)
nonlimited = c17000 + c20500
limitstart = ID_ps[MARS - 1]
if c21060 > nonlimited and c00100 > limitstart:
dedmin = ID_crt * (c21060 - nonlimited)
dedpho = ID_prt * max(0., posagi - limitstart)
c21040 = min(dedmin, dedpho)
c04470 = c21060 - c21040
else:
c21040 = 0.
c04470 = c21060
c04470 = min(c04470, ID_c[MARS - 1])
# Return total itemized deduction amounts and components
return (c17000, c18300, c19200, c19700, c20500, c20800,
c21040, c21060, c04470)
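# Worked example of the Pease limitation at the end of ItemDed
# (hypothetical values, not read from policy data): with ID_prt = 0.03,
# ID_crt = 0.80, ID_ps = 300000, posagi = 400000, c21060 = 50000, and
# nonlimited = 0:
#   dedpho = 0.03 * (400000 - 300000) = 3000
#   dedmin = 0.80 * 50000 = 40000
#   c21040 = min(40000, 3000) = 3000, so c04470 = 50000 - 3000 = 47000
# before the ID_c dollar cap is applied.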
@iterate_jit(nopython=True)
def AdditionalMedicareTax(e00200, MARS,
AMEDT_ec, sey, AMEDT_rt,
FICA_mc_trt, FICA_ss_trt,
ptax_amc, payrolltax):
"""
Computes Additional Medicare Tax (Form 8959) included in payroll taxes.
Notes
-----
Tax Law Parameters:
AMEDT_ec : Additional Medicare Tax earnings exclusion
AMEDT_rt : Additional Medicare Tax rate
FICA_ss_trt : FICA Social Security tax rate
FICA_mc_trt : FICA Medicare tax rate
    Taxpayer Characteristics:
e00200 : Wages and salaries
sey : Self-employment income
Returns
-------
ptax_amc : Additional Medicare Tax
payrolltax : payroll tax augmented by Additional Medicare Tax
"""
line8 = max(0., sey) * (1. - 0.5 * (FICA_mc_trt + FICA_ss_trt))
line11 = max(0., AMEDT_ec[MARS - 1] - e00200)
ptax_amc = AMEDT_rt * (max(0., e00200 - AMEDT_ec[MARS - 1]) +
max(0., line8 - line11))
payrolltax += ptax_amc
return (ptax_amc, payrolltax)
@iterate_jit(nopython=True)
def StdDed(DSI, earned, STD, age_head, age_spouse, STD_Aged, STD_Dep,
MARS, MIDR, blind_head, blind_spouse, standard, c19700,
STD_allow_charity_ded_nonitemizers):
"""
Calculates standard deduction, including standard deduction for
    dependents, aged, and blind.
Notes
-----
Tax Law Parameters:
STD : Standard deduction amount, filing status dependent
STD_Dep : Standard deduction for dependents
STD_Aged : Additional standard deduction for blind and aged
Taxpayer Characteristics:
earned : Form 2441 earned income amount
DSI : Dependent Status Indicator:
0 - not being claimed as a dependent
1 - claimed as a dependent
MIDR : Married filing separately itemized deductions
requirement indicator:
0 - not necessary to itemize because of filing status
1 - necessary to itemize when filing separately
Returns
-------
standard : the standard deduction amount for filing unit
"""
# calculate deduction for dependents
if DSI == 1:
c15100 = max(350. + earned, STD_Dep)
basic_stded = min(STD[MARS - 1], c15100)
else:
c15100 = 0.
if MIDR == 1:
basic_stded = 0.
else:
basic_stded = STD[MARS - 1]
# calculate extra standard deduction for aged and blind
num_extra_stded = blind_head + blind_spouse
if age_head >= 65:
num_extra_stded += 1
if MARS == 2 and age_spouse >= 65:
num_extra_stded += 1
extra_stded = num_extra_stded * STD_Aged[MARS - 1]
# calculate the total standard deduction
standard = basic_stded + extra_stded
if MARS == 3 and MIDR == 1:
standard = 0.
if STD_allow_charity_ded_nonitemizers:
standard += c19700
return standard
@iterate_jit(nopython=True)
def TaxInc(c00100, standard, c04470, c04600, MARS, e00900, e26270,
e02100, e27200, e00650, c01000,
PT_SSTB_income, PT_binc_w2_wages, PT_ubia_property,
PT_qbid_rt, PT_qbid_taxinc_thd, PT_qbid_taxinc_gap,
PT_qbid_w2_wages_rt,
PT_qbid_alt_w2_wages_rt, PT_qbid_alt_property_rt,
c04800, qbided, StudentLoan_em, studloan_debt, sldf):
"""
Calculates taxable income, c04800, and
qualified business income deduction, qbided.
"""
# calculate taxable income before qualified business income deduction
pre_qbid_taxinc = max(0., c00100 - max(c04470, standard) - c04600)
# calculate qualified business income deduction
qbided = 0.
qbinc = max(0., e00900 + e26270 + e02100 + e27200)
qbided_full = qbinc * PT_qbid_rt
if PT_qbid_taxinc_thd[MARS-1] > 0:
if pre_qbid_taxinc < PT_qbid_taxinc_thd[MARS-1]:
qbided = qbided_full
else:
            qbided = max(0., qbided_full *
                         (1. - (pre_qbid_taxinc -
                                PT_qbid_taxinc_thd[MARS-1]) /
                          PT_qbid_taxinc_gap[MARS-1]))
else:
qbided = qbided_full
"""
if qbinc > 0. and PT_qbid_rt > 0.:
qbid_before_limits = qbinc * PT_qbid_rt
lower_thd = PT_qbid_taxinc_thd[MARS - 1]
if pre_qbid_taxinc <= lower_thd:
qbided = qbid_before_limits
else:
pre_qbid_taxinc_gap = PT_qbid_taxinc_gap[MARS - 1]
upper_thd = lower_thd + pre_qbid_taxinc_gap
if PT_SSTB_income == 1 and pre_qbid_taxinc >= upper_thd:
qbided = 0.
else:
wage_cap = PT_binc_w2_wages * PT_qbid_w2_wages_rt
alt_cap = (PT_binc_w2_wages * PT_qbid_alt_w2_wages_rt +
PT_ubia_property * PT_qbid_alt_property_rt)
full_cap = max(wage_cap, alt_cap)
if PT_SSTB_income == 0 and pre_qbid_taxinc >= upper_thd:
# apply full cap
qbided = min(full_cap, qbid_before_limits)
elif PT_SSTB_income == 0 and pre_qbid_taxinc < upper_thd:
# apply adjusted cap as in Part III of Worksheet 12-A
# in 2018 IRS Publication 535 (Chapter 12)
prt = (pre_qbid_taxinc - lower_thd) / pre_qbid_taxinc_gap
adj = prt * (qbid_before_limits - full_cap)
qbided = qbid_before_limits - adj
else: # PT_SSTB_income == 1 and pre_qbid_taxinc < upper_thd
prti = (upper_thd - pre_qbid_taxinc) / pre_qbid_taxinc_gap
qbid_adjusted = prti * qbid_before_limits
cap_adjusted = prti * full_cap
prt = (pre_qbid_taxinc - lower_thd) / pre_qbid_taxinc_gap
adj = prt * (qbid_adjusted - cap_adjusted)
qbided = qbid_adjusted - adj
"""
    # apply taxinc cap (assuming the cap rate equals PT_qbid_rt)
net_cg = e00650 + c01000 # per line 34 in 2018 Pub 535 Worksheet 12-A
taxinc_cap = PT_qbid_rt * max(0., pre_qbid_taxinc - net_cg)
qbided = min(qbided, taxinc_cap)
# exclude forgiven student loan debt from taxable income
    if StudentLoan_em:
base_sldf = max(0., studloan_debt)
else:
base_sldf = 0.
# exclusion is limited to tax inc
sldf = max(0., min(pre_qbid_taxinc - qbided, base_sldf))
# calculate taxable income after qualified business income deduction
c04800 = max(0., pre_qbid_taxinc - qbided - sldf)
return (c04800, qbided, sldf)
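# Worked example of the simplified QBID phase-out in TaxInc (hypothetical
# values, not read from policy data): with PT_qbid_rt = 0.2,
# PT_qbid_taxinc_thd = 160700, and PT_qbid_taxinc_gap = 50000, a filer
# with qbinc = 100000 has qbided_full = 20000; at
# pre_qbid_taxinc = 180700 the deduction is scaled by
# 1 - (180700 - 160700) / 50000 = 0.6, giving qbided = 12000 before the
# net-capital-gain taxinc cap is applied.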
@JIT(nopython=True)
def SchXYZ(taxable_income, MARS, e00900, e26270, e02000, e00200,
PT_rt1, PT_rt2, PT_rt3, PT_rt4, PT_rt5,
PT_rt6, PT_rt7, PT_rt8,
PT_brk1, PT_brk2, PT_brk3, PT_brk4, PT_brk5,
PT_brk6, PT_brk7,
II_rt1, II_rt2, II_rt3, II_rt4, II_rt5,
II_rt6, II_rt7, II_rt8,
II_brk1, II_brk2, II_brk3, II_brk4, II_brk5,
II_brk6, II_brk7, PT_EligibleRate_active,
PT_EligibleRate_passive, PT_wages_active_income,
PT_top_stacking):
"""
Returns Schedule X, Y, Z tax amount for specified taxable_income.
"""
# separate non-negative taxable income into two non-negative components,
# doing this in a way so that the components add up to taxable income
# define pass-through income eligible for PT schedule
pt_passive = PT_EligibleRate_passive * (e02000 - e26270)
pt_active_gross = e00900 + e26270
if (pt_active_gross > 0) and PT_wages_active_income:
pt_active_gross = pt_active_gross + e00200
pt_active = PT_EligibleRate_active * pt_active_gross
pt_active = min(pt_active, e00900 + e26270)
pt_taxinc = max(0., pt_passive + pt_active)
if pt_taxinc >= taxable_income:
pt_taxinc = taxable_income
reg_taxinc = 0.
else:
# pt_taxinc is unchanged
reg_taxinc = taxable_income - pt_taxinc
# determine stacking order
if PT_top_stacking:
reg_tbase = 0.
pt_tbase = reg_taxinc
else:
reg_tbase = pt_taxinc
pt_tbase = 0.
# compute Schedule X,Y,Z tax using the two components of taxable income
if reg_taxinc > 0.:
reg_tax = Taxes(reg_taxinc, MARS, reg_tbase,
II_rt1, II_rt2, II_rt3, II_rt4,
II_rt5, II_rt6, II_rt7, II_rt8, II_brk1, II_brk2,
II_brk3, II_brk4, II_brk5, II_brk6, II_brk7)
else:
reg_tax = 0.
if pt_taxinc > 0.:
pt_tax = Taxes(pt_taxinc, MARS, pt_tbase,
PT_rt1, PT_rt2, PT_rt3, PT_rt4,
PT_rt5, PT_rt6, PT_rt7, PT_rt8, PT_brk1, PT_brk2,
PT_brk3, PT_brk4, PT_brk5, PT_brk6, PT_brk7)
else:
pt_tax = 0.
return reg_tax + pt_tax
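# Design note on PT_top_stacking in SchXYZ: taxable income is split into
# a pass-through component (taxed under the PT_* schedule) and a regular
# component (taxed under the II_* schedule).  With top stacking, the
# pass-through component is taxed starting from a bracket base equal to
# reg_taxinc, so it is treated as the "last" dollars earned; with bottom
# stacking the roles are reversed.  For example, if
# taxable_income = 100000 splits into pt_taxinc = 30000 and
# reg_taxinc = 70000, top stacking taxes the 30000 of pass-through income
# in the PT brackets that apply above 70000.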
@iterate_jit(nopython=True)
def SchXYZTax(c04800, MARS, e00900, e26270, e02000, e00200,
PT_rt1, PT_rt2, PT_rt3, PT_rt4, PT_rt5,
PT_rt6, PT_rt7, PT_rt8,
PT_brk1, PT_brk2, PT_brk3, PT_brk4, PT_brk5,
PT_brk6, PT_brk7,
II_rt1, II_rt2, II_rt3, II_rt4, II_rt5,
II_rt6, II_rt7, II_rt8,
II_brk1, II_brk2, II_brk3, II_brk4, II_brk5,
II_brk6, II_brk7, PT_EligibleRate_active,
PT_EligibleRate_passive, PT_wages_active_income,
PT_top_stacking, c05200):
"""
SchXYZTax calls SchXYZ function and sets c05200 to returned amount.
"""
c05200 = SchXYZ(c04800, MARS, e00900, e26270, e02000, e00200,
PT_rt1, PT_rt2, PT_rt3, PT_rt4, PT_rt5,
PT_rt6, PT_rt7, PT_rt8,
PT_brk1, PT_brk2, PT_brk3, PT_brk4, PT_brk5,
PT_brk6, PT_brk7,
II_rt1, II_rt2, II_rt3, II_rt4, II_rt5,
II_rt6, II_rt7, II_rt8,
II_brk1, II_brk2, II_brk3, II_brk4, II_brk5,
II_brk6, II_brk7, PT_EligibleRate_active,
PT_EligibleRate_passive, PT_wages_active_income,
PT_top_stacking)
return c05200
@iterate_jit(nopython=True)
def GainsTax(e00650, c01000, c23650, p23250, e01100, e58990, e00200,
e24515, e24518, MARS, c04800, c05200, e00900, e26270, e02000,
II_rt1, II_rt2, II_rt3, II_rt4, II_rt5, II_rt6, II_rt7, II_rt8,
II_brk1, II_brk2, II_brk3, II_brk4, II_brk5, II_brk6, II_brk7,
PT_rt1, PT_rt2, PT_rt3, PT_rt4, PT_rt5, PT_rt6, PT_rt7, PT_rt8,
PT_brk1, PT_brk2, PT_brk3, PT_brk4, PT_brk5, PT_brk6, PT_brk7,
CG_nodiff, PT_EligibleRate_active, PT_EligibleRate_passive,
PT_wages_active_income, PT_top_stacking,
CG_rt1, CG_rt2, CG_rt3, CG_rt4, CG_brk1, CG_brk2, CG_brk3,
dwks10, dwks13, dwks14, dwks19, c05700, taxbc):
"""
GainsTax function implements (2015) Schedule D Tax Worksheet logic for
the special taxation of long-term capital gains and qualified dividends
if CG_nodiff is false.
"""
# pylint: disable=too-many-statements
if c01000 > 0. or c23650 > 0. or p23250 > 0. or e01100 > 0. or e00650 > 0.:
hasqdivltcg = 1 # has qualified dividends or long-term capital gains
else:
hasqdivltcg = 0 # no qualified dividends or long-term capital gains
if CG_nodiff:
hasqdivltcg = 0 # no special taxation of qual divids and l-t cap gains
if hasqdivltcg == 1:
dwks1 = c04800
dwks2 = e00650
dwks3 = e58990
dwks4 = 0. # always assumed to be zero
dwks5 = max(0., dwks3 - dwks4)
dwks6 = max(0., dwks2 - dwks5)
dwks7 = min(p23250, c23650) # SchD lines 15 and 16, respectively
# dwks8 = min(dwks3, dwks4)
# dwks9 = max(0., dwks7 - dwks8)
# BELOW TWO STATEMENTS ARE UNCLEAR IN LIGHT OF dwks9=... COMMENT
if e01100 > 0.:
c24510 = e01100
else:
c24510 = max(0., dwks7) + e01100
dwks9 = max(0., c24510 - min(0., e58990))
# ABOVE TWO STATEMENTS ARE UNCLEAR IN LIGHT OF dwks9=... COMMENT
dwks10 = dwks6 + dwks9
dwks11 = e24515 + e24518 # SchD lines 18 and 19, respectively
dwks12 = min(dwks9, dwks11)
dwks13 = dwks10 - dwks12
dwks14 = max(0., dwks1 - dwks13)
dwks16 = min(CG_brk1[MARS - 1], dwks1)
dwks17 = min(dwks14, dwks16)
dwks18 = max(0., dwks1 - dwks10)
dwks19 = max(dwks17, dwks18)
dwks20 = dwks16 - dwks17
lowest_rate_tax = CG_rt1 * dwks20
# break in worksheet lines
dwks21 = min(dwks1, dwks13)
dwks22 = dwks20
dwks23 = max(0., dwks21 - dwks22)
dwks25 = min(CG_brk2[MARS - 1], dwks1)
dwks26 = dwks19 + dwks20
dwks27 = max(0., dwks25 - dwks26)
dwks28 = min(dwks23, dwks27)
dwks29 = CG_rt2 * dwks28
dwks30 = dwks22 + dwks28
dwks31 = dwks21 - dwks30
dwks32 = CG_rt3 * dwks31
hi_base = max(0., dwks31 - CG_brk3[MARS - 1])
hi_incremental_rate = CG_rt4 - CG_rt3
highest_rate_incremental_tax = hi_incremental_rate * hi_base
# break in worksheet lines
dwks33 = min(dwks9, e24518)
dwks34 = dwks10 + dwks19
dwks36 = max(0., dwks34 - dwks1)
dwks37 = max(0., dwks33 - dwks36)
dwks38 = 0.25 * dwks37
# break in worksheet lines
dwks39 = dwks19 + dwks20 + dwks28 + dwks31 + dwks37
dwks40 = dwks1 - dwks39
dwks41 = 0.28 * dwks40
dwks42 = SchXYZ(dwks19, MARS, e00900, e26270, e02000, e00200,
PT_rt1, PT_rt2, PT_rt3, PT_rt4, PT_rt5,
PT_rt6, PT_rt7, PT_rt8,
PT_brk1, PT_brk2, PT_brk3, PT_brk4, PT_brk5,
PT_brk6, PT_brk7,
II_rt1, II_rt2, II_rt3, II_rt4, II_rt5,
II_rt6, II_rt7, II_rt8,
II_brk1, II_brk2, II_brk3, II_brk4, II_brk5,
II_brk6, II_brk7, PT_EligibleRate_active,
PT_EligibleRate_passive, PT_wages_active_income,
PT_top_stacking)
dwks43 = (dwks29 + dwks32 + dwks38 + dwks41 + dwks42 +
lowest_rate_tax + highest_rate_incremental_tax)
dwks44 = c05200
dwks45 = min(dwks43, dwks44)
c24580 = dwks45
else: # if hasqdivltcg is zero
c24580 = c05200
dwks10 = max(0., min(p23250, c23650)) + e01100
dwks13 = 0.
dwks14 = 0.
dwks19 = 0.
# final calculations done no matter what the value of hasqdivltcg
c05100 = c24580 # because foreign earned income exclusion is assumed zero
c05700 = 0. # no Form 4972, Lump Sum Distributions
taxbc = c05700 + c05100
return (dwks10, dwks13, dwks14, dwks19, c05700, taxbc)
@iterate_jit(nopython=True)
def AGIsurtax(c00100, MARS, AGI_surtax_trt, AGI_surtax_thd, taxbc, surtax):
"""
Computes surtax on AGI above some threshold.
"""
if AGI_surtax_trt > 0.:
hiAGItax = AGI_surtax_trt * max(c00100 - AGI_surtax_thd[MARS - 1], 0.)
taxbc += hiAGItax
surtax += hiAGItax
return (taxbc, surtax)
@iterate_jit(nopython=True)
def AMT(e07300, dwks13, standard, f6251, c00100, c18300, taxbc,
c04470, c17000, c20800, c21040, e24515, MARS, sep, dwks19,
dwks14, c05700, e62900, e00700, dwks10, age_head, age_spouse,
earned, cmbtp,
AMT_child_em_c_age, AMT_brk1,
AMT_em, AMT_prt, AMT_rt1, AMT_rt2,
AMT_child_em, AMT_em_ps, AMT_em_pe,
AMT_CG_brk1, AMT_CG_brk2, AMT_CG_brk3, AMT_CG_rt1, AMT_CG_rt2,
AMT_CG_rt3, AMT_CG_rt4, c05800, c09600, c62100):
"""
Computes Alternative Minimum Tax (AMT) taxable income and liability, where
c62100 is AMT taxable income,
c09600 is AMT tax liability, and
c05800 is total (regular + AMT) income tax liability before credits.
Note that line-number variable names refer to 2015 Form 6251.
"""
# pylint: disable=too-many-statements,too-many-branches
# Form 6251, Part I
if standard == 0.0:
c62100 = (c00100 - e00700 - c04470 +
max(0., min(c17000, 0.025 * c00100)) +
c18300 + c20800 - c21040)
if standard > 0.0:
c62100 = c00100 - e00700
c62100 += cmbtp # add income not in AGI but considered income for AMT
if MARS == 3:
amtsepadd = max(0.,
min(AMT_em[MARS - 1], AMT_prt * (c62100 - AMT_em_pe)))
else:
amtsepadd = 0.
c62100 = c62100 + amtsepadd # AMT taxable income, which is line28
# Form 6251, Part II top
line29 = max(0., AMT_em[MARS - 1] - AMT_prt *
max(0., c62100 - AMT_em_ps[MARS - 1]))
young_head = age_head != 0 and age_head < AMT_child_em_c_age
no_or_young_spouse = age_spouse < AMT_child_em_c_age
if young_head and no_or_young_spouse:
line29 = min(line29, earned + AMT_child_em)
line30 = max(0., c62100 - line29)
line3163 = (AMT_rt1 * line30 +
AMT_rt2 * max(0., (line30 - (AMT_brk1 / sep))))
if dwks10 > 0. or dwks13 > 0. or dwks14 > 0. or dwks19 > 0. or e24515 > 0.:
# complete Form 6251, Part III (line36 is equal to line30)
line37 = dwks13
line38 = e24515
line39 = min(line37 + line38, dwks10)
line40 = min(line30, line39)
line41 = max(0., line30 - line40)
line42 = (AMT_rt1 * line41 +
AMT_rt2 * max(0., (line41 - (AMT_brk1 / sep))))
line44 = dwks14
line45 = max(0., AMT_CG_brk1[MARS - 1] - line44)
line46 = min(line30, line37)
line47 = min(line45, line46) # line47 is amount taxed at AMT_CG_rt1
cgtax1 = line47 * AMT_CG_rt1
line48 = line46 - line47
line51 = dwks19
line52 = line45 + line51
line53 = max(0., AMT_CG_brk2[MARS - 1] - line52)
line54 = min(line48, line53) # line54 is amount taxed at AMT_CG_rt2
cgtax2 = line54 * AMT_CG_rt2
line56 = line47 + line54 # total amount in lower two brackets
if line41 == line56:
line57 = 0. # line57 is amount taxed at AMT_CG_rt3
linex2 = 0. # linex2 is amount taxed at AMT_CG_rt4
else:
line57 = line46 - line56
linex1 = min(line48,
max(0., AMT_CG_brk3[MARS - 1] - line44 - line45))
linex2 = max(0., line54 - linex1)
cgtax3 = line57 * AMT_CG_rt3
cgtax4 = linex2 * AMT_CG_rt4
if line38 == 0.:
line61 = 0.
else:
line61 = 0.25 * max(0., line30 - line41 - line56 - line57 - linex2)
line62 = line42 + cgtax1 + cgtax2 + cgtax3 + cgtax4 + line61
line64 = min(line3163, line62)
line31 = line64
else: # if not completing Form 6251, Part III
line31 = line3163
# Form 6251, Part II bottom
if f6251 == 1:
line32 = e62900
else:
line32 = e07300
line33 = line31 - line32
c09600 = max(0., line33 - max(0., taxbc - e07300 - c05700))
c05800 = taxbc + c09600
return (c62100, c09600, c05800)
@iterate_jit(nopython=True)
def NetInvIncTax(e00300, e00600, e02000, e26270, c01000,
c00100, NIIT_thd, MARS, NIIT_PT_taxed, NIIT_rt, niit):
"""
Computes Net Investment Income Tax (NIIT) amount assuming that
all annuity income is excluded from net investment income.
"""
modAGI = c00100 # no foreign earned income exclusion to add
if not NIIT_PT_taxed:
NII = max(0., e00300 + e00600 + c01000 + e02000 - e26270)
else: # do not subtract e26270 from e02000
NII = max(0., e00300 + e00600 + c01000 + e02000)
niit = NIIT_rt * min(NII, max(0., modAGI - NIIT_thd[MARS - 1]))
return niit
@iterate_jit(nopython=True)
def F2441(MARS, earned_p, earned_s, f2441, CDCC_c, e32800,
exact, c00100, CDCC_ps, CDCC_crt, c05800, e07300, c07180):
"""
Calculates Form 2441 child and dependent care expense credit, c07180.
"""
# credit for at most two cared-for individuals and for actual expenses
max_credit = min(f2441, 2) * CDCC_c
c32800 = max(0., min(e32800, max_credit))
# credit is limited to minimum of individuals' earned income
c32880 = earned_p # earned income of taxpayer
if MARS == 2:
c32890 = earned_s # earned income of spouse when present
else:
c32890 = earned_p
c33000 = max(0., min(c32800, min(c32880, c32890)))
# credit is limited by AGI-related fraction
if exact == 1: # exact calculation as on tax forms
tratio = math.ceil(max(((c00100 - CDCC_ps) / 2000.), 0.))
c33200 = c33000 * 0.01 * max(20., CDCC_crt - min(15., tratio))
else:
c33200 = c33000 * 0.01 * max(20., CDCC_crt -
max(((c00100 - CDCC_ps) / 2000.), 0.))
# credit is limited by tax liability
c07180 = min(max(0., c05800 - e07300), c33200)
return c07180
@JIT(nopython=True)
def EITCamount(basic_frac, phasein_rate, earnings, max_amount,
phaseout_start, agi, phaseout_rate):
"""
Returns EITC amount given specified parameters.
English parameter names are used in this function because the
EITC formula is not available on IRS forms or in IRS instructions;
the extensive IRS EITC look-up table does not reveal the formula.
"""
eitc = min((basic_frac * max_amount +
(1.0 - basic_frac) * phasein_rate * earnings), max_amount)
if earnings > phaseout_start or agi > phaseout_start:
eitcx = max(0., (max_amount - phaseout_rate *
max(0., max(earnings, agi) - phaseout_start)))
eitc = min(eitc, eitcx)
return eitc
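def _sketch_eitc_example():
    """
    Minimal sketch (illustrative only; the parameter values below are
    hypothetical stand-ins, not read from policy data) showing the
    trapezoidal schedule implemented by EITCamount.
    """
    # phase-in range: credit = min(0.34 * 8000, 3584) = 2720
    low = EITCamount(0.0, 0.34, 8000., 3584., 19330., 8000., 0.1598)
    # phase-out range: credit = 3584 - 0.1598 * (25000 - 19330) = 2677.93
    high = EITCamount(0.0, 0.34, 25000., 3584., 19330., 25000., 0.1598)
    return (low, high)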
@iterate_jit(nopython=True)
def EITC(MARS, DSI, EIC, c00100, e00300, e00400, e00600, c01000,
e02000, e26270, age_head, age_spouse, earned, earned_p, earned_s,
EITC_ps, EITC_MinEligAge, EITC_MaxEligAge, EITC_ps_MarriedJ,
EITC_rt, EITC_c, EITC_prt, EITC_basic_frac,
EITC_InvestIncome_c, EITC_excess_InvestIncome_rt,
EITC_indiv, EITC_sep_filers_elig,
c59660):
"""
Computes EITC amount, c59660.
"""
# pylint: disable=too-many-branches
if MARS != 2:
eitc = EITCamount(EITC_basic_frac,
EITC_rt[EIC], earned, EITC_c[EIC],
EITC_ps[EIC], c00100, EITC_prt[EIC])
if EIC == 0:
# enforce age eligibility rule for those with no EITC-eligible
# kids assuming that an unknown age_* value implies EITC age
# eligibility
h_age_elig = EITC_MinEligAge <= age_head <= EITC_MaxEligAge
if (age_head == 0 or h_age_elig):
c59660 = eitc
else:
c59660 = 0.
else: # if EIC != 0
c59660 = eitc
if MARS == 2:
po_start = EITC_ps[EIC] + EITC_ps_MarriedJ[EIC]
if not EITC_indiv:
# filing unit EITC rather than individual EITC
eitc = EITCamount(EITC_basic_frac,
EITC_rt[EIC], earned, EITC_c[EIC],
po_start, c00100, EITC_prt[EIC])
if EITC_indiv:
# individual EITC rather than a filing-unit EITC
eitc_p = EITCamount(EITC_basic_frac,
EITC_rt[EIC], earned_p, EITC_c[EIC],
po_start, earned_p, EITC_prt[EIC])
eitc_s = EITCamount(EITC_basic_frac,
EITC_rt[EIC], earned_s, EITC_c[EIC],
po_start, earned_s, EITC_prt[EIC])
eitc = eitc_p + eitc_s
if EIC == 0:
h_age_elig = EITC_MinEligAge <= age_head <= EITC_MaxEligAge
s_age_elig = EITC_MinEligAge <= age_spouse <= EITC_MaxEligAge
if (age_head == 0 or age_spouse == 0 or h_age_elig or s_age_elig):
c59660 = eitc
else:
c59660 = 0.
else:
c59660 = eitc
if (MARS == 3 and not EITC_sep_filers_elig) or DSI == 1:
c59660 = 0.
# reduce positive EITC if investment income exceeds ceiling
if c59660 > 0.:
invinc = (e00400 + e00300 + e00600 +
max(0., c01000) + max(0., (e02000 - e26270)))
if invinc > EITC_InvestIncome_c:
eitc = (c59660 - EITC_excess_InvestIncome_rt *
(invinc - EITC_InvestIncome_c))
c59660 = max(0., eitc)
return c59660
@iterate_jit(nopython=True)
def RefundablePayrollTaxCredit(was_plus_sey_p, was_plus_sey_s,
RPTC_c, RPTC_rt,
rptc_p, rptc_s, rptc):
"""
Computes refundable payroll tax credit amounts.
"""
rptc_p = min(was_plus_sey_p * RPTC_rt, RPTC_c)
rptc_s = min(was_plus_sey_s * RPTC_rt, RPTC_c)
rptc = rptc_p + rptc_s
return (rptc_p, rptc_s, rptc)
@iterate_jit(nopython=True)
def ChildDepTaxCredit(n24, MARS, c00100, XTOT, num, c05800,
e07260, CR_ResidentialEnergy_hc,
e07300, CR_ForeignTax_hc,
c07180,
c07230,
e07240, CR_RetirementSavings_hc,
c07200,
CTC_c, CTC_ps, CTC_prt, exact, ODC_c,
CTC_c_under6_bonus, nu06,
c07220, odc, codtc_limited):
"""
Computes amounts on "Child Tax Credit and Credit for Other Dependents
Worksheet" in 2018 Publication 972, which pertain to these two
nonrefundable tax credits.
"""
# Worksheet Part 1
line1 = CTC_c * n24 + CTC_c_under6_bonus * nu06
line2 = ODC_c * max(0, XTOT - n24 - num)
line3 = line1 + line2
modAGI = c00100 # no foreign earned income exclusion to add to AGI (line6)
if line3 > 0. and modAGI > CTC_ps[MARS - 1]:
excess = modAGI - CTC_ps[MARS - 1]
if exact == 1: # exact calculation as on tax forms
excess = 1000. * math.ceil(excess / 1000.)
line10 = max(0., line3 - CTC_prt * excess)
else:
line10 = line3
if line10 > 0.:
# Worksheet Part 2
line11 = c05800
line12 = (e07260 * (1. - CR_ResidentialEnergy_hc) +
e07300 * (1. - CR_ForeignTax_hc) +
c07180 + # child & dependent care expense credit
c07230 + # education credit
e07240 * (1. - CR_RetirementSavings_hc) +
c07200) # Schedule R credit
line13 = line11 - line12
line14 = 0.
line15 = max(0., line13 - line14)
line16 = min(line10, line15) # credit is capped by tax liability
else:
line16 = 0.
# separate the CTC and ODTC amounts
c07220 = 0. # nonrefundable CTC amount
odc = 0. # nonrefundable ODTC amount
if line16 > 0.:
if line1 > 0.:
c07220 = line16 * line1 / line3
odc = max(0., line16 - c07220)
# compute codtc_limited for use in AdditionalCTC function
codtc_limited = max(0., line10 - line16)
return (c07220, odc, codtc_limited)
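# Worked example of the Part 1 phase-out in ChildDepTaxCredit
# (hypothetical values, not read from policy data): with CTC_c = 2000,
# n24 = 2, CTC_ps = 400000 for MARS == 2, and CTC_prt = 0.05, a couple
# with modAGI = 420000 has line3 = 4000 and excess = 20000, so
# line10 = 4000 - 0.05 * 20000 = 3000; Part 2 then caps that amount by
# remaining tax liability.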
@iterate_jit(nopython=True)
def PersonalTaxCredit(MARS, c00100,
II_credit, II_credit_ps, II_credit_prt,
II_credit_nr, II_credit_nr_ps, II_credit_nr_prt,
personal_refundable_credit,
personal_nonrefundable_credit):
"""
Computes personal_refundable_credit and personal_nonrefundable_credit,
neither of which are part of current-law policy.
"""
# calculate personal refundable credit amount with phase-out
personal_refundable_credit = II_credit[MARS - 1]
if II_credit_prt > 0. and c00100 > II_credit_ps[MARS - 1]:
pout = II_credit_prt * (c00100 - II_credit_ps[MARS - 1])
fully_phasedout = personal_refundable_credit - pout
personal_refundable_credit = max(0., fully_phasedout)
# calculate personal nonrefundable credit amount with phase-out
personal_nonrefundable_credit = II_credit_nr[MARS - 1]
if II_credit_nr_prt > 0. and c00100 > II_credit_nr_ps[MARS - 1]:
pout = II_credit_nr_prt * (c00100 - II_credit_nr_ps[MARS - 1])
fully_phasedout = personal_nonrefundable_credit - pout
personal_nonrefundable_credit = max(0., fully_phasedout)
return (personal_refundable_credit, personal_nonrefundable_credit)
@iterate_jit(nopython=True)
def IRADCTaxCredit(e03150, e03300, IRADC_credit_c, IRADC_credit_rt, iradctc):
"""
Computes refundable retirement savings tax credit amount.
"""
# calculate refundable credit amount
tot_retirement_contributions = e03150 + e03300
if IRADC_credit_rt > 0.:
        iradctc = min(tot_retirement_contributions * IRADC_credit_rt,
                      IRADC_credit_c)
else:
iradctc = 0.
return (iradctc)
@iterate_jit(nopython=True)
def FTHBTaxCredit(MARS, FTHB_credit, FTHB_credit_c, c00100,
FTHB_credit_e, fthbc, fthb_credit_amt):
"""
Computes refundable first time homebuyers' tax credit amount.
"""
    if FTHB_credit:
# max credit
fthbc = max(0., min(FTHB_credit_c, fthb_credit_amt))
# eliminated based on agi
positiveagiamt = max(c00100, 0.)
fthb_max_agi = FTHB_credit_e[MARS - 1]
        if positiveagiamt > fthb_max_agi:
            fthbc = 0.
return (fthbc)
@iterate_jit(nopython=True)
def ICGTaxCredit(earned_p, earned_s, MARS, ICG_credit_c, ICG_credit_em,
ICG_credit_rt, ICG_credit_thd, icg_expense, c05800, e07300,
icgtc):
"""
Computes nonrefundable informal care giver tax credit.
"""
# not reflected in current law and records modified with imputation
# earned income of taxpayer
icg32880 = earned_p # earned income of taxpayer
if MARS == 2:
icg32890 = earned_s # earned income of spouse when present
else:
icg32890 = earned_p
icg33000 = min(icg32880, icg32890)
if icg33000 > ICG_credit_thd:
# credit for actual expenses
icg_max_credit = (icg_expense - ICG_credit_em) * ICG_credit_rt
icg_credit = max(0., min(icg_max_credit, ICG_credit_c))
# credit is limited to minimum of individuals' earned income
icg_credit = max(0., min(icg_credit, icg33000))
# credit is limited by tax liability
icgtc = min(max(0., c05800 - e07300), icg_credit)
else:
icgtc = 0.
return icgtc
@iterate_jit(nopython=True)
def IRATaxCredit(earned_p, earned_s, MARS, AutoIRA_credit, ira_credit,
c05800, e07300, iratc):
"""
Computes nonrefundable automatic enrollment in IRA tax credit.
"""
# not reflected in current law and records modified with imputation
    if AutoIRA_credit:
iratc = max(0., ira_credit)
else:
iratc = 0.
return iratc
@iterate_jit(nopython=True)
def EVTaxCredit(EV_credit, ev_credit_amt, EV_credit_c, c00100, EV_credit_ps, MARS,
EV_credit_prt, evtc):
"""
Computes nonrefundable full-electric vehicle tax credit.
"""
    if EV_credit:
# not reflected in current law and records modified with imputation
elecv_credit = max(0., min(ev_credit_amt, EV_credit_c))
# phaseout based on agi
posevagi = max(c00100, 0.)
ev_max = EV_credit_ps[MARS - 1]
if posevagi < ev_max:
evtc = elecv_credit
else:
            evtc_reduced = max(0., elecv_credit -
                               EV_credit_prt * (posevagi - ev_max))
            evtc = min(elecv_credit, evtc_reduced)
return evtc
@iterate_jit(nopython=True)
def AmOppCreditParts(exact, e87521, num, c00100, CR_AmOppRefundable_hc,
CR_AmOppNonRefundable_hc, c10960, c87668):
"""
Applies a phaseout to the Form 8863, line 1, American Opportunity Credit
amount, e87521, and then applies the 0.4 refundable rate.
Logic corresponds to Form 8863, Part I.
Notes
-----
Tax Law Parameters that are not parameterized:
90000 : American Opportunity Credit phaseout income base
10000 : American Opportunity Credit phaseout income range length
1/1000 : American Opportunity Credit phaseout rate
0.4 : American Opportunity Credit refundable rate
Parameters
----------
exact : whether or not to do rounding of phaseout fraction
e87521 : total tentative American Opportunity Credit for all students,
Form 8863, line 1
num : number of people filing jointly
c00100 : AGI
CR_AmOppRefundable_hc: haircut for the refundable portion of the
American Opportunity Credit
CR_AmOppNonRefundable_hc: haircut for the nonrefundable portion of the
American Opportunity Credit
Returns
-------
c10960 : Refundable part of American Opportunity Credit
c87668 : Tentative nonrefundable part of American Opportunity Credit
"""
if e87521 > 0.:
c87658 = max(0., 90000. * num - c00100)
c87660 = 10000. * num
if exact == 1: # exact calculation as on tax forms
c87662 = 1000. * min(1., round(c87658 / c87660, 3))
else:
c87662 = 1000. * min(1., c87658 / c87660)
c87664 = c87662 * e87521 / 1000.
c10960 = 0.4 * c87664 * (1. - CR_AmOppRefundable_hc)
c87668 = c87664 - c10960 * (1. - CR_AmOppNonRefundable_hc)
else:
c10960 = 0.
c87668 = 0.
return (c10960, c87668)
@iterate_jit(nopython=True)
def SchR(age_head, age_spouse, MARS, c00100,
c05800, e07300, c07180, e02400, c02500, e01500, e01700, CR_SchR_hc,
c07200):
"""
Calculates Schedule R credit for the elderly and the disabled, c07200.
Note that no Schedule R policy parameters are inflation indexed.
    Note that all Schedule R policy parameters are hard-coded and,
    therefore, cannot be changed using Policy class parameters.
Note that the CR_SchR_hc policy parameter allows the user to eliminate
or reduce total Schedule R credits.
"""
if age_head >= 65 or (MARS == 2 and age_spouse >= 65):
# calculate credit assuming nobody is disabled (so line12 = line10)
if MARS == 2:
if age_head >= 65 and age_spouse >= 65:
schr12 = 7500.
else:
schr12 = 5000.
schr15 = 10000.
elif MARS == 3:
schr12 = 3750.
schr15 = 5000.
elif MARS in (1, 4):
schr12 = 5000.
schr15 = 7500.
else:
schr12 = 0.
schr15 = 0.
# nontaxable portion of OASDI benefits, line 13a
schr13a = max(0., e02400 - c02500)
# nontaxable portion of pension benefits, line 13b
# NOTE: the following approximation (required because of inadequate IRS
# data) will be accurate if all pensions are partially taxable
# or if all pensions are fully taxable. But if a filing unit
# receives at least one partially taxable pension and at least
# one fully taxable pension, then the approximation in the
# following line is not exactly correct.
schr13b = max(0., e01500 - e01700)
schr13c = schr13a + schr13b
schr16 = max(0., c00100 - schr15)
schr17 = 0.5 * schr16
schr18 = schr13c + schr17
schr19 = max(0., schr12 - schr18)
schr20 = 0.15 * schr19
schr21 = max(0., (c05800 - e07300 - c07180))
c07200 = min(schr20, schr21) * (1. - CR_SchR_hc)
else: # if not calculating Schedule R credit
c07200 = 0.
return c07200
@iterate_jit(nopython=True)
def EducationTaxCredit(exact, e87530, MARS, c00100, num, c05800,
e07300, c07180, c07200, c87668,
LLC_Expense_c, ETC_pe_Single, ETC_pe_Married,
CR_Education_hc,
c07230):
"""
Computes Education Tax Credits (Form 8863) nonrefundable amount, c07230.
Logic corresponds to Form 8863, Part II.
Notes
-----
Tax Law Parameters that are not parameterized:
0.2 : Lifetime Learning Credit ratio against expense
Tax Law Parameters that are parameterized:
LLC_Expense_c : Lifetime Learning Credit expense limit
ETC_pe_Married : Education Tax Credit phaseout end for married
ETC_pe_Single : Education Tax Credit phaseout end for single
    Taxpayer Characteristics:
exact : whether or not to do rounding of phaseout fraction
e87530 : Lifetime Learning Credit total qualified expenses,
Form 8863, line 10
e07300 : Foreign tax credit - Form 1116
c07180 : Child/dependent care expense credit - Form 2441
c07200 : Schedule R credit
Returns
-------
c07230 : Education Tax Credits (Form 8863) nonrefundable amount
"""
c87560 = 0.2 * min(e87530, LLC_Expense_c)
if MARS == 2:
c87570 = ETC_pe_Married * 1000.
else:
c87570 = ETC_pe_Single * 1000.
c87590 = max(0., c87570 - c00100)
c87600 = 10000. * num
if exact == 1: # exact calculation as on tax forms
c87610 = min(1., round(c87590 / c87600, 3))
else:
c87610 = min(1., c87590 / c87600)
c87620 = c87560 * c87610
xline4 = max(0., c05800 - (e07300 + c07180 + c07200))
xline5 = min(c87620, xline4)
xline9 = max(0., c05800 - (e07300 + c07180 + c07200 + xline5))
xline10 = min(c87668, xline9)
c87680 = xline5 + xline10
c07230 = c87680 * (1. - CR_Education_hc)
return c07230
@iterate_jit(nopython=True)
def CharityCredit(e19800, e20100, c00100, CR_Charity_rt, CR_Charity_f,
CR_Charity_frt, MARS, charity_credit):
"""
Computes nonrefundable charity credit, charity_credit.
This credit is not part of current-law policy.
"""
total_charity = e19800 + e20100
floor = max(CR_Charity_frt * c00100, CR_Charity_f[MARS - 1])
charity_cr_floored = max(total_charity - floor, 0)
charity_credit = CR_Charity_rt * (charity_cr_floored)
return charity_credit
@iterate_jit(nopython=True)
def NonrefundableCredits(c05800, e07240, e07260, e07300, e07400,
e07600, p08000, odc,
personal_nonrefundable_credit, icgtc, iratc, evtc,
CR_RetirementSavings_hc, CR_ForeignTax_hc,
CR_ResidentialEnergy_hc, CR_GeneralBusiness_hc,
CR_MinimumTax_hc, CR_OtherCredits_hc, charity_credit,
c07180, c07200, c07220, c07230, c07240,
c07260, c07300, c07400, c07600, c08000):
"""
NonRefundableCredits function sequentially limits credits to tax liability.
Parameters
----------
CR_RetirementSavings_hc: Retirement savings credit haircut
CR_ForeignTax_hc: Foreign tax credit haircut
CR_ResidentialEnergy_hc: Residential energy credit haircut
CR_GeneralBusiness_hc: General business credit haircut
CR_MinimumTax_hc: Minimum tax credit haircut
CR_OtherCredits_hc: Other credits haircut
"""
# limit tax credits to tax liability in order they are on 2015 1040 form
avail = c05800
# Foreign tax credit - Form 1116
c07300 = min(e07300 * (1. - CR_ForeignTax_hc), avail)
avail = avail - c07300
# Child & dependent care expense credit
c07180 = min(c07180, avail)
avail = avail - c07180
# Education tax credit
c07230 = min(c07230, avail)
avail = avail - c07230
# Retirement savings credit - Form 8880
c07240 = min(e07240 * (1. - CR_RetirementSavings_hc), avail)
avail = avail - c07240
# Child tax credit
c07220 = min(c07220, avail)
avail = avail - c07220
# Other dependent credit
odc = min(odc, avail)
avail = avail - odc
# Residential energy credit - Form 5695
c07260 = min(e07260 * (1. - CR_ResidentialEnergy_hc), avail)
avail = avail - c07260
# General business credit - Form 3800
c07400 = min(e07400 * (1. - CR_GeneralBusiness_hc), avail)
avail = avail - c07400
# Prior year minimum tax credit - Form 8801
c07600 = min(e07600 * (1. - CR_MinimumTax_hc), avail)
avail = avail - c07600
# Schedule R credit
c07200 = min(c07200, avail)
avail = avail - c07200
# Other credits
c08000 = min(p08000 * (1. - CR_OtherCredits_hc), avail)
avail = avail - c08000
# Charity credit
charity_credit = min(charity_credit, avail)
avail = avail - charity_credit
# Personal nonrefundable credit
personal_nonrefundable_credit = min(personal_nonrefundable_credit, avail)
avail = avail - personal_nonrefundable_credit
# ICG credit
icgtc = min(icgtc, avail)
avail = avail - icgtc
# IRA credit
iratc = min(iratc, avail)
avail = avail - iratc
# EV credit
evtc = min(evtc, avail)
avail = avail - evtc
return (c07180, c07200, c07220, c07230, c07240, odc,
c07260, c07300, c07400, c07600, c08000, charity_credit,
personal_nonrefundable_credit, icgtc, iratc, evtc)
@iterate_jit(nopython=True)
def AdditionalCTC(codtc_limited, ACTC_c, n24, earned, ACTC_Income_thd,
ACTC_rt, nu06, ACTC_rt_bonus_under6family, ACTC_ChildNum,
ptax_was, c03260, e09800, c59660, e11200,
c11070):
"""
Calculates refundable Additional Child Tax Credit (ACTC), c11070,
following 2018 Form 8812 logic.
"""
# Part I
line3 = codtc_limited
line4 = ACTC_c * n24
c11070 = 0. # line15
if line3 > 0. and line4 > 0.:
line5 = min(line3, line4)
line7 = max(0., earned - ACTC_Income_thd)
        # accommodate ACTC rate bonus for families with children under 6
if nu06 == 0:
ACTC_rate = ACTC_rt
else:
ACTC_rate = ACTC_rt + ACTC_rt_bonus_under6family
line8 = ACTC_rate * line7
if n24 < ACTC_ChildNum:
if line8 > 0.:
c11070 = min(line5, line8)
else: # if n24 >= ACTC_ChildNum
if line8 >= line5:
c11070 = line5
else: # complete Part II
line9 = 0.5 * ptax_was
line10 = c03260 + e09800
line11 = line9 + line10
line12 = c59660 + e11200
line13 = max(0., line11 - line12)
line14 = max(line8, line13)
c11070 = min(line5, line14)
return c11070
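# Worked example of the Part I earned-income formula in AdditionalCTC
# (hypothetical values, not read from policy data): with ACTC_rt = 0.15
# and ACTC_Income_thd = 2500, a filer with earned = 22500 has
# line8 = 0.15 * (22500 - 2500) = 3000, so the refundable credit is the
# smaller of 3000 and the unused child credit (line5) carried in from
# ChildDepTaxCredit.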
@iterate_jit(nopython=True)
def C1040(c05800, c07180, c07200, c07220, c07230, c07240, c07260, c07300,
c07400, c07600, c08000, e09700, e09800, e09900, niit, othertaxes,
c07100, c09200, odc, charity_credit,
personal_nonrefundable_credit, icgtc, iratc, evtc):
"""
Computes total used nonrefundable credits, c07100, othertaxes, and
income tax before refundable credits, c09200.
"""
# total used nonrefundable credits (as computed in NonrefundableCredits)
c07100 = (c07180 + c07200 + c07600 + c07300 + c07400 + c07220 + c08000 +
c07230 + c07240 + c07260 + odc + charity_credit +
personal_nonrefundable_credit + icgtc + iratc + evtc)
# tax after credits (2016 Form 1040, line 56)
tax_net_nonrefundable_credits = max(0., c05800 - c07100)
# tax (including othertaxes) before refundable credits
othertaxes = e09700 + e09800 + e09900 + niit
c09200 = othertaxes + tax_net_nonrefundable_credits
return (c07100, othertaxes, c09200)
@iterate_jit(nopython=True)
def CTC_new(CTC_new_c, CTC_new_rt, CTC_new_c_under6_bonus,
CTC_new_ps, CTC_new_prt, CTC_new_for_all,
CTC_new_refund_limited, CTC_new_refund_limit_payroll_rt,
CTC_new_refund_limited_all_payroll, payrolltax,
n24, nu06, c00100, MARS, ptax_oasdi, c09200,
ctc_new):
"""
Computes new refundable child tax credit using specified parameters.
"""
if n24 > 0:
posagi = max(c00100, 0.)
ctc_new = CTC_new_c * n24 + CTC_new_c_under6_bonus * nu06
if not CTC_new_for_all:
ctc_new = min(CTC_new_rt * posagi, ctc_new)
ymax = CTC_new_ps[MARS - 1]
if posagi > ymax:
ctc_new_reduced = max(0.,
ctc_new - CTC_new_prt * (posagi - ymax))
ctc_new = min(ctc_new, ctc_new_reduced)
if ctc_new > 0. and CTC_new_refund_limited:
refund_new = max(0., ctc_new - c09200)
if not CTC_new_refund_limited_all_payroll:
limit_new = CTC_new_refund_limit_payroll_rt * ptax_oasdi
if CTC_new_refund_limited_all_payroll:
limit_new = CTC_new_refund_limit_payroll_rt * payrolltax
limited_new = max(0., refund_new - limit_new)
ctc_new = max(0., ctc_new - limited_new)
else:
ctc_new = 0.
return ctc_new
@iterate_jit(nopython=True)
def CDCC_new(CDCC_new_c, CDCC_new_rt, CDCC_new_ps, CDCC_new_pe,
             CDCC_new_prt, cdcc_new, MARS, f2441, e32800,
             earned_s, earned_p, c05800, e07300, c00100):
"""
Calculates new refundable child and dependent care expense credit, cdcc_new.
"""
# credit for at most two cared-for individuals and for actual expenses
cdcc_new_max_credit = min(f2441, 2) * CDCC_new_c
cdcc_new_32800 = max(0., min(e32800 * CDCC_new_rt, cdcc_new_max_credit))
# credit is limited to minimum of individuals' earned income
cdcc_new_32880 = earned_p # earned income of taxpayer
if MARS == 2:
cdcc_new_32890 = earned_s # earned income of spouse when present
else:
cdcc_new_32890 = earned_p
    cdcc_new_33000 = max(0., min(cdcc_new_32800,
                                 min(cdcc_new_32880, cdcc_new_32890)))
# credit is limited by tax liability
cdcc_new = min(max(0., c05800 - e07300), cdcc_new_33000)
# phaseout based on agi
positiveagi = max(c00100, 0.)
cdcc_min = CDCC_new_ps[MARS - 1]
cdcc_max = CDCC_new_pe[MARS - 1]
    if positiveagi >= cdcc_max:
        cdcc_new = 0.
    elif positiveagi >= cdcc_min:
        cdcc_new_reduced = max(0., cdcc_new -
                               CDCC_new_prt * (positiveagi - cdcc_min))
        cdcc_new = min(cdcc_new, cdcc_new_reduced)
return cdcc_new
@iterate_jit(nopython=True)
def IITAX(c59660, c11070, c10960, personal_refundable_credit, ctc_new, rptc,
c09200, payrolltax,
eitc, refund, iitax, combined, iradctc, fthbc, cdcc_new,
business_burden, estate_burden, Business_tax_combined):
"""
Computes final taxes.
"""
eitc = c59660
refund = (eitc + c11070 + c10960 +
personal_refundable_credit + ctc_new + rptc + iradctc + fthbc + cdcc_new)
iitax = c09200 - refund
if Business_tax_combined is True:
combined = iitax + payrolltax + business_burden + estate_burden
else:
combined = iitax + payrolltax
return (eitc, refund, iitax, combined)
@JIT(nopython=True)
def Taxes(income, MARS, tbrk_base,
rate1, rate2, rate3, rate4, rate5, rate6, rate7, rate8,
tbrk1, tbrk2, tbrk3, tbrk4, tbrk5, tbrk6, tbrk7):
"""
Taxes function returns tax amount given the progressive tax rate
schedule specified by the rate* and (upper) tbrk* parameters and
given income, filing status (MARS), and tax bracket base (tbrk_base).
"""
if tbrk_base > 0.:
brk1 = max(tbrk1[MARS - 1] - tbrk_base, 0.)
brk2 = max(tbrk2[MARS - 1] - tbrk_base, 0.)
brk3 = max(tbrk3[MARS - 1] - tbrk_base, 0.)
brk4 = max(tbrk4[MARS - 1] - tbrk_base, 0.)
brk5 = max(tbrk5[MARS - 1] - tbrk_base, 0.)
brk6 = max(tbrk6[MARS - 1] - tbrk_base, 0.)
brk7 = max(tbrk7[MARS - 1] - tbrk_base, 0.)
else:
brk1 = tbrk1[MARS - 1]
brk2 = tbrk2[MARS - 1]
brk3 = tbrk3[MARS - 1]
brk4 = tbrk4[MARS - 1]
brk5 = tbrk5[MARS - 1]
brk6 = tbrk6[MARS - 1]
brk7 = tbrk7[MARS - 1]
return (rate1 * min(income, brk1) +
rate2 * min(brk2 - brk1, max(0., income - brk1)) +
rate3 * min(brk3 - brk2, max(0., income - brk2)) +
rate4 * min(brk4 - brk3, max(0., income - brk3)) +
rate5 * min(brk5 - brk4, max(0., income - brk4)) +
rate6 * min(brk6 - brk5, max(0., income - brk5)) +
rate7 * min(brk7 - brk6, max(0., income - brk6)) +
rate8 * max(0., income - brk7))
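# Illustrative example (hypothetical rates and brackets): with tbrk_base=0.,
# rate1=0.10, rate2=0.20, tbrk1[MARS - 1]=10000, tbrk2[MARS - 1]=50000 and
# income=15000, Taxes returns 0.10 * 10000 + 0.20 * (15000 - 10000) = 2000;
# all higher-bracket terms are zero because income is below brk2.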
def ComputeBenefit(calc, ID_switch):
"""
Calculates the value of the benefits accrued from itemizing.
"""
# compute income tax liability with no itemized deductions allowed for
# the types of itemized deductions covered under the BenefitSurtax
no_ID_calc = copy.deepcopy(calc)
if ID_switch[0]:
no_ID_calc.policy_param('ID_Medical_hc', [1.])
if ID_switch[1]:
no_ID_calc.policy_param('ID_StateLocalTax_hc', [1.])
if ID_switch[2]:
no_ID_calc.policy_param('ID_RealEstate_hc', [1.])
if ID_switch[3]:
no_ID_calc.policy_param('ID_Casualty_hc', [1.])
if ID_switch[4]:
no_ID_calc.policy_param('ID_Miscellaneous_hc', [1.])
if ID_switch[5]:
no_ID_calc.policy_param('ID_InterestPaid_hc', [1.])
if ID_switch[6]:
no_ID_calc.policy_param('ID_Charity_hc', [1.])
no_ID_calc._calc_one_year() # pylint: disable=protected-access
diff_iitax = no_ID_calc.array('iitax') - calc.array('iitax')
benefit = np.where(diff_iitax > 0., diff_iitax, 0.)
return benefit
def BenefitSurtax(calc):
"""
Computes itemized-deduction-benefit surtax and adds the surtax amount
to income tax, combined tax, and surtax liabilities.
"""
if calc.policy_param('ID_BenefitSurtax_crt') != 1.:
ben = ComputeBenefit(calc,
calc.policy_param('ID_BenefitSurtax_Switch'))
agi = calc.array('c00100')
ben_deduct = calc.policy_param('ID_BenefitSurtax_crt') * agi
ben_exempt_array = calc.policy_param('ID_BenefitSurtax_em')
ben_exempt = ben_exempt_array[calc.array('MARS') - 1]
ben_dedem = ben_deduct + ben_exempt
ben_surtax = (calc.policy_param('ID_BenefitSurtax_trt') *
np.where(ben > ben_dedem, ben - ben_dedem, 0.))
# add ben_surtax to income & combined taxes and to surtax subtotal
calc.incarray('iitax', ben_surtax)
calc.incarray('combined', ben_surtax)
calc.incarray('surtax', ben_surtax)
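# Illustrative example (hypothetical policy values): with agi=100000,
# ID_BenefitSurtax_crt=0.03, ben_exempt=1000, ben=5000 and
# ID_BenefitSurtax_trt=0.10, the surtax base is max(5000 - (3000 + 1000), 0.)
# = 1000, so ben_surtax = 100 is added to iitax, combined, and surtax.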
def BenefitLimitation(calc):
"""
Limits the benefits of select itemized deductions to a fraction of
deductible expenses.
"""
if calc.policy_param('ID_BenefitCap_rt') != 1.:
benefit = ComputeBenefit(calc,
calc.policy_param('ID_BenefitCap_Switch'))
# Calculate total deductible expenses under the cap
deduct_exps = 0.
if calc.policy_param('ID_BenefitCap_Switch')[0]: # medical
deduct_exps += calc.array('c17000')
if calc.policy_param('ID_BenefitCap_Switch')[1]: # statelocal
one_minus_hc = 1. - calc.policy_param('ID_StateLocalTax_hc')
deduct_exps += (one_minus_hc *
np.maximum(calc.array('e18400_capped'), 0.))
if calc.policy_param('ID_BenefitCap_Switch')[2]: # realestate
one_minus_hc = 1. - calc.policy_param('ID_RealEstate_hc')
deduct_exps += one_minus_hc * calc.array('e18500_capped')
if calc.policy_param('ID_BenefitCap_Switch')[3]: # casualty
deduct_exps += calc.array('c20500')
if calc.policy_param('ID_BenefitCap_Switch')[4]: # misc
deduct_exps += calc.array('c20800')
if calc.policy_param('ID_BenefitCap_Switch')[5]: # interest
deduct_exps += calc.array('c19200')
if calc.policy_param('ID_BenefitCap_Switch')[6]: # charity
deduct_exps += calc.array('c19700')
# Calculate cap value for itemized deductions
benefit_limit = deduct_exps * calc.policy_param('ID_BenefitCap_rt')
# Add the difference between the actual benefit and capped benefit
# to income tax and combined tax liabilities.
excess_benefit = np.maximum(benefit - benefit_limit, 0)
calc.incarray('iitax', excess_benefit)
calc.incarray('surtax', excess_benefit)
calc.incarray('combined', excess_benefit)
@iterate_jit(nopython=True)
def FairShareTax(c00100, MARS, ptax_was, setax, ptax_amc,
FST_AGI_trt, FST_AGI_thd_lo, FST_AGI_thd_hi,
fstax, iitax, combined, surtax):
"""
Computes Fair Share Tax, or "Buffett Rule", types of reforms.
Taxpayer Characteristics
------------------------
c00100 : AGI
MARS : filing (marital) status
ptax_was : payroll tax on wages and salaries
setax : self-employment tax
ptax_amc : Additional Medicare Tax on high earnings
Returns
-------
fstax : Fair Share Tax amount
iitax : individual income tax augmented by fstax
combined : individual income tax plus payroll taxes augmented by fstax
surtax : individual income tax subtotal augmented by fstax
"""
if FST_AGI_trt > 0. and c00100 >= FST_AGI_thd_lo[MARS - 1]:
employee_share = 0.5 * ptax_was + 0.5 * setax + ptax_amc
fstax = max(c00100 * FST_AGI_trt - iitax - employee_share, 0.)
thd_gap = max(FST_AGI_thd_hi[MARS - 1] - FST_AGI_thd_lo[MARS - 1], 0.)
if thd_gap > 0. and c00100 < FST_AGI_thd_hi[MARS - 1]:
fstax *= (c00100 - FST_AGI_thd_lo[MARS - 1]) / thd_gap
iitax += fstax
combined += fstax
surtax += fstax
else:
fstax = 0.
return (fstax, iitax, combined, surtax)
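# Illustrative example (hypothetical values): with FST_AGI_trt=0.30,
# c00100=2000000 (above FST_AGI_thd_hi), iitax=400000 and
# employee_share=50000, fstax = max(0.30 * 2000000 - 400000 - 50000, 0.)
# = 150000, which is then added to iitax, combined, and surtax.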
@iterate_jit(nopython=True)
def LumpSumTax(DSI, num, XTOT,
LST,
lumpsum_tax, combined):
"""
Computes lump-sum tax and add it to combined taxes.
"""
if LST == 0.0 or DSI == 1:
lumpsum_tax = 0.
else:
lumpsum_tax = LST * max(num, XTOT)
combined += lumpsum_tax
return (lumpsum_tax, combined)
@iterate_jit(nopython=True)
def ExpandIncome(e00200, pencon_p, pencon_s, e00300, e00400, e00600,
e00700, e00800, e00900, e01100, e01200, e01400, e01500,
e02000, e02100, p22250, p23250, cmbtp, ptax_was,
benefit_value_total, expanded_income):
"""
Calculates expanded_income from component income types.
"""
expanded_income = (
e00200 + # wage and salary income net of DC pension contributions
pencon_p + # tax-advantaged DC pension contributions for taxpayer
pencon_s + # tax-advantaged DC pension contributions for spouse
e00300 + # taxable interest income
e00400 + # non-taxable interest income
e00600 + # dividends
e00700 + # state and local income tax refunds
e00800 + # alimony received
e00900 + # Sch C business net income/loss
e01100 + # capital gain distributions not reported on Sch D
e01200 + # Form 4797 other net gain/loss
e01400 + # taxable IRA distributions
e01500 + # total pension & annuity income (including DB-plan benefits)
e02000 + # Sch E total rental, ..., partnership, S-corp income/loss
e02100 + # Sch F farm net income/loss
p22250 + # Sch D: net short-term capital gain/loss
p23250 + # Sch D: net long-term capital gain/loss
cmbtp + # other AMT taxable income items from Form 6251
0.5 * ptax_was + # employer share of FICA taxes on wages/salaries
benefit_value_total # consumption value of all benefits received;
# see the BenefitPrograms function in this file for details on
# exactly how the benefit_value_total variable is computed
)
return expanded_income
@iterate_jit(nopython=True)
def AfterTaxIncome(combined, expanded_income, aftertax_income,
Business_tax_expinc, corp_taxliab):
"""
Calculates after-tax expanded income.
Parameters
----------
combined: combined tax liability
expanded_income: expanded income
corp_taxliab: imputed corporate tax liability
Returns
-------
aftertax_income: expanded_income minus combined
"""
if Business_tax_expinc is True:
expanded_income = expanded_income + corp_taxliab
else:
expanded_income = expanded_income
aftertax_income = expanded_income - combined
return aftertax_income
| IRADCTaxCredit |
mod.rs | #[doc = r" Value read from the register"]
pub struct R {
bits: u32,
}
#[doc = r" Value to write to the register"]
pub struct W {
bits: u32,
}
impl super::AHBRSTR {
#[doc = r" Modifies the contents of the register"]
#[inline]
pub fn modify<F>(&self, f: F)
where
for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
{
let bits = self.register.get();
let r = R { bits: bits };
let mut w = W { bits: bits };
f(&r, &mut w);
self.register.set(w.bits);
}
#[doc = r" Reads the contents of the register"]
#[inline]
pub fn read(&self) -> R {
R {
bits: self.register.get(),
}
}
#[doc = r" Writes to the register"]
#[inline]
pub fn write<F>(&self, f: F)
where
F: FnOnce(&mut W) -> &mut W,
{
let mut w = W::reset_value();
f(&mut w);
self.register.set(w.bits);
}
#[doc = r" Writes the reset value to the register"]
#[inline]
pub fn reset(&self) {
self.write(|w| w)
}
}
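// Illustrative usage (sketch; assumes `rcc` is the RCC peripheral instance
// exposed by this crate):
//
//     rcc.ahbrstr.modify(|_, w| w.ioparst().set_bit());   // assert port A reset
//     rcc.ahbrstr.modify(|_, w| w.ioparst().clear_bit()); // release port A reset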
#[doc = r" Value of the field"]
pub struct IOPARSTR {
bits: bool,
}
impl IOPARSTR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool |
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct IOPBRSTR {
bits: bool,
}
impl IOPBRSTR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct IOPCRSTR {
bits: bool,
}
impl IOPCRSTR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct IOPDRSTR {
bits: bool,
}
impl IOPDRSTR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct IOPFRSTR {
bits: bool,
}
impl IOPFRSTR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct TSCRSTR {
bits: bool,
}
impl TSCRSTR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct ADC12RSTR {
bits: bool,
}
impl ADC12RSTR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Proxy"]
pub struct _IOPARSTW<'a> {
w: &'a mut W,
}
impl<'a> _IOPARSTW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 17;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _IOPBRSTW<'a> {
w: &'a mut W,
}
impl<'a> _IOPBRSTW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 18;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _IOPCRSTW<'a> {
w: &'a mut W,
}
impl<'a> _IOPCRSTW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 19;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _IOPDRSTW<'a> {
w: &'a mut W,
}
impl<'a> _IOPDRSTW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 20;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _IOPFRSTW<'a> {
w: &'a mut W,
}
impl<'a> _IOPFRSTW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 22;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _TSCRSTW<'a> {
w: &'a mut W,
}
impl<'a> _TSCRSTW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 24;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _ADC12RSTW<'a> {
w: &'a mut W,
}
impl<'a> _ADC12RSTW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 28;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
impl R {
#[doc = r" Value of the register as raw bits"]
#[inline]
pub fn bits(&self) -> u32 {
self.bits
}
#[doc = "Bit 17 - I/O port A reset"]
#[inline]
pub fn ioparst(&self) -> IOPARSTR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 17;
((self.bits >> OFFSET) & MASK as u32) != 0
};
IOPARSTR { bits }
}
#[doc = "Bit 18 - I/O port B reset"]
#[inline]
pub fn iopbrst(&self) -> IOPBRSTR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 18;
((self.bits >> OFFSET) & MASK as u32) != 0
};
IOPBRSTR { bits }
}
#[doc = "Bit 19 - I/O port C reset"]
#[inline]
pub fn iopcrst(&self) -> IOPCRSTR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 19;
((self.bits >> OFFSET) & MASK as u32) != 0
};
IOPCRSTR { bits }
}
#[doc = "Bit 20 - I/O port D reset"]
#[inline]
pub fn iopdrst(&self) -> IOPDRSTR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 20;
((self.bits >> OFFSET) & MASK as u32) != 0
};
IOPDRSTR { bits }
}
#[doc = "Bit 22 - I/O port F reset"]
#[inline]
pub fn iopfrst(&self) -> IOPFRSTR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 22;
((self.bits >> OFFSET) & MASK as u32) != 0
};
IOPFRSTR { bits }
}
#[doc = "Bit 24 - Touch sensing controller reset"]
#[inline]
pub fn tscrst(&self) -> TSCRSTR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 24;
((self.bits >> OFFSET) & MASK as u32) != 0
};
TSCRSTR { bits }
}
#[doc = "Bit 28 - ADC1 and ADC2 reset"]
#[inline]
pub fn adc12rst(&self) -> ADC12RSTR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 28;
((self.bits >> OFFSET) & MASK as u32) != 0
};
ADC12RSTR { bits }
}
}
impl W {
#[doc = r" Reset value of the register"]
#[inline]
pub fn reset_value() -> W {
W { bits: 0 }
}
#[doc = r" Writes raw bits to the register"]
#[inline]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.bits = bits;
self
}
#[doc = "Bit 17 - I/O port A reset"]
#[inline]
pub fn ioparst(&mut self) -> _IOPARSTW {
_IOPARSTW { w: self }
}
#[doc = "Bit 18 - I/O port B reset"]
#[inline]
pub fn iopbrst(&mut self) -> _IOPBRSTW {
_IOPBRSTW { w: self }
}
#[doc = "Bit 19 - I/O port C reset"]
#[inline]
pub fn iopcrst(&mut self) -> _IOPCRSTW {
_IOPCRSTW { w: self }
}
#[doc = "Bit 20 - I/O port D reset"]
#[inline]
pub fn iopdrst(&mut self) -> _IOPDRSTW {
_IOPDRSTW { w: self }
}
#[doc = "Bit 22 - I/O port F reset"]
#[inline]
pub fn iopfrst(&mut self) -> _IOPFRSTW {
_IOPFRSTW { w: self }
}
#[doc = "Bit 24 - Touch sensing controller reset"]
#[inline]
pub fn tscrst(&mut self) -> _TSCRSTW {
_TSCRSTW { w: self }
}
#[doc = "Bit 28 - ADC1 and ADC2 reset"]
#[inline]
pub fn adc12rst(&mut self) -> _ADC12RSTW {
_ADC12RSTW { w: self }
}
}
| {
!self.bit()
} |
alert_query.go | // Copyright 2018 Prometheus Team
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package cli
import (
"context"
"errors"
"fmt"
kingpin "gopkg.in/alecthomas/kingpin.v2"
"github.com/prometheus/alertmanager/api/v2/client/alert"
"github.com/prometheus/alertmanager/cli/format"
"github.com/prometheus/alertmanager/pkg/parse"
)
type alertQueryCmd struct {
inhibited, silenced, active, unprocessed bool
receiver string
matcherGroups []string
}
const alertQueryHelp = `View and search through current alerts.
Amtool has a simplified Prometheus query syntax, but contains robust support for
bash variable expansions. The non-option section of arguments constructs a list
of "Matcher Groups" that will be used to filter your query. The following
examples will attempt to show this behaviour in action:
amtool alert query alertname=foo node=bar
This query will match all alerts with the alertname=foo and node=bar label
value pairs set.
amtool alert query foo node=bar
If alertname is omitted and the first argument does not contain a '=' or a
'=~' then it will be assumed to be the value of the alertname pair.
amtool alert query 'alertname=~foo.*'
As well as direct equality, regex matching is also supported. The '=~' syntax
(similar to Prometheus) is used to represent a regex match. Regex matching
can be used in combination with a direct match.
Amtool supports several flags for filtering the returned alerts by state
(inhibited, silenced, active, unprocessed). If none of these flags is given,
only active alerts are returned.
`
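// For example, `amtool alert query foo --silenced` is treated as
// `amtool alert query alertname=foo --silenced`: a first argument that fails
// to parse as a matcher (no '=' or '=~') is promoted to the alertname label
// in queryAlerts below.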
func configureQueryAlertsCmd(cc *kingpin.CmdClause) |
func (a *alertQueryCmd) queryAlerts(ctx context.Context, _ *kingpin.ParseContext) error {
if len(a.matcherGroups) > 0 {
// Attempt to parse the first argument. If the parser fails
// then we likely don't have a (=|=~|!=|!~) so lets assume that
// the user wants alertname=<arg> and prepend `alertname=` to
// the front.
m := a.matcherGroups[0]
_, err := parse.Matcher(m)
if err != nil {
a.matcherGroups[0] = fmt.Sprintf("alertname=%s", m)
}
}
// If no selector was passed, default to showing active alerts.
if !a.silenced && !a.inhibited && !a.active && !a.unprocessed {
a.active = true
}
alertParams := alert.NewGetAlertsParams().WithContext(ctx).
WithActive(&a.active).
WithInhibited(&a.inhibited).
WithSilenced(&a.silenced).
WithUnprocessed(&a.unprocessed).
WithReceiver(&a.receiver).
WithFilter(a.matcherGroups)
amclient := NewAlertmanagerClient(alertmanagerURL)
getOk, err := amclient.Alert.GetAlerts(alertParams)
if err != nil {
return err
}
formatter, found := format.Formatters[output]
if !found {
return errors.New("unknown output formatter")
}
return formatter.FormatAlerts(getOk.Payload)
}
| {
var (
a = &alertQueryCmd{}
queryCmd = cc.Command("query", alertQueryHelp).Default()
)
queryCmd.Flag("inhibited", "Show inhibited alerts").Short('i').BoolVar(&a.inhibited)
queryCmd.Flag("silenced", "Show silenced alerts").Short('s').BoolVar(&a.silenced)
queryCmd.Flag("active", "Show active alerts").Short('a').BoolVar(&a.active)
queryCmd.Flag("unprocessed", "Show unprocessed alerts").Short('u').BoolVar(&a.unprocessed)
queryCmd.Flag("receiver", "Show alerts matching receiver (Supports regex syntax)").Short('r').StringVar(&a.receiver)
queryCmd.Arg("matcher-groups", "Query filter").StringsVar(&a.matcherGroups)
queryCmd.Action(execWithTimeout(a.queryAlerts))
} |
main.py | from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support.ui import Select
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
from selenium.webdriver import Firefox, Chrome, PhantomJS
from selenium import webdriver
from argparse import ArgumentParser
from urllib.parse import quote
import time
import copy
import sys
import os
TIMEOUT = 20
TIMESLP = 3
def login(driver, username, password, failed=0):
if failed == 3:
raise Exception('Portal login failed')
iaaaUrl = 'https://iaaa.pku.edu.cn/iaaa/oauth.jsp'
appName = quote('北京大学校内信息门户新版')
redirectUrl = 'https://portal.pku.edu.cn/portal2017/ssoLogin.do'
driver.get('https://portal.pku.edu.cn/portal2017/')
driver.get(
f'{iaaaUrl}?appID=portal2017&appName={appName}&redirectUrl={redirectUrl}'
)
print('Logging in to the portal...')
driver.find_element_by_id('user_name').send_keys(username)
time.sleep(TIMESLP)
driver.find_element_by_id('password').send_keys(password)
time.sleep(TIMESLP)
driver.find_element_by_id('logon_button').click()
try:
WebDriverWait(driver, TIMEOUT).until(
EC.visibility_of_element_located((By.LINK_TEXT, '我知道了')))
except:
pass
else:
driver.find_element_by_link_text('我知道了').click()
try:
WebDriverWait(driver, TIMEOUT).until(
EC.visibility_of_element_located((By.ID, 'all')))
except:
login(driver, username, password, failed + 1)
else:
print('Portal login succeeded!')
def go_to_application_out(driver):
driver.find_element_by_id('all').click()
WebDriverWait(driver, TIMEOUT).until(
EC.visibility_of_element_located((By.ID, 'tag_s_stuCampusExEnReq')))
driver.find_element_by_id('tag_s_stuCampusExEnReq').click()
time.sleep(TIMESLP)
driver.switch_to.window(driver.window_handles[-1])
WebDriverWait(driver, TIMEOUT).until(
EC.visibility_of_element_located((By.CLASS_NAME, 'el-card__body')))
time.sleep(TIMESLP)
driver.find_element_by_class_name('el-card__body').click()
time.sleep(TIMESLP)
WebDriverWait(driver, TIMEOUT).until(
EC.visibility_of_element_located((By.CLASS_NAME, 'el-input__inner')))
def go_to_application_in(driver):
driver.get('https://portal.pku.edu.cn/portal2017/#/bizCenter')
WebDriverWait(driver, TIMEOUT).until(
EC.visibility_of_element_located((By.ID, 'all')))
driver.find_element_by_id('all').click()
WebDriverWait(driver, TIMEOUT).until(
EC.visibility_of_element_located((By.ID, 'tag_s_stuCampusExEnReq')))
driver.find_element_by_id('tag_s_stuCampusExEnReq').click()
time.sleep(TIMESLP)
driver.switch_to.window(driver.window_handles[-1])
WebDriverWait(driver, TIMEOUT).until(
EC.visibility_of_element_located((By.CLASS_NAME, 'el-card__body')))
time.sleep(TIMESLP)
driver.find_element_by_class_name('el-card__body').click()
time.sleep(TIMESLP)
WebDriverWait(driver, TIMEOUT).until(
EC.visibility_of_element_located((By.CLASS_NAME, 'el-select')))
def select_in_out(driver, way):
driver.find_element_by_class_name('el-select').click()
time.sleep(TIMESLP)
driver.find_element_by_xpath(f'//li/span[text()="{way}"]').click()
def select_campus(driver, campus):
driver.find_elements_by_class_name('el-select')[1].click()
time.sleep(TIMESLP)
driver.find_element_by_xpath(f'//li/span[text()="{campus}"]').click()
def select_destination(driver, destination):
driver.find_elements_by_class_name('el-select')[2].click()
time.sleep(TIMESLP)
driver.find_element_by_xpath(f'//li/span[text()="{destination}"]').click()
def select_district(driver, district):
driver.find_elements_by_class_name('el-select')[3].click()
time.sleep(TIMESLP)
driver.find_element_by_xpath(f'//li/span[text()="{district}"]').click()
def write_reason(driver, reason):
driver.find_element_by_class_name('el-textarea__inner').send_keys(
f'{reason}')
time.sleep(TIMESLP)
def write_track(driver, track):
driver.find_elements_by_class_name('el-textarea__inner')[1].send_keys(
f'{track}')
time.sleep(TIMESLP)
def write_street(driver, street):
driver.find_elements_by_class_name('el-textarea__inner')[1].send_keys(
f'{street}')
time.sleep(TIMESLP)
def click_check(driver):
driver.find_element_by_class_name('el-checkbox__label').click()
time.sleep(TIMESLP)
def click_inPeking(driver):
driver.find_element_by_class_name('el-radio__inner').click()
time.sleep(TIMESLP)
def submit(driver):
driver.find_element_by_xpath(
'//button/span[contains(text(),"保存")]').click()
WebDriverWait(driver, TIMEOUT).until(
EC.visibility_of_element_located(
(By.XPATH, '(//button/span[contains(text(),"提交")])[3]')))
driver.find_element_by_xpath(
'(//button/span[contains(text(),"提交")])[3]').click()
time.sleep(TIMESLP)
def fill_out(driver, campus, reason, destination, track):
print('Starting the exit-campus application')
print('Selecting exit/entry ', end='')
select_in_out(driver, '出校')
print('Done')
print('Selecting campus ', end='')
select_campus(driver, campus)
print('Done')
print('Filling in the reason for exit/entry ', end='')
write_reason(driver, reason)
print('Done')
print('Selecting the destination ', end='')
select_destination(driver, destination)
print('Done')
print('Filling in the movement track ', end='')
write_track(driver, track)
print('Done')
click_check(driver)
submit(driver)
print('Exit-campus application submitted!')
def fill_in(driver, campus, reason, habitation, district, street):
print('Starting the entry-campus application')
print('选择出校/入校 ', end='')
select_in_out(driver, '入校')
print('Done')
print('Filling in the reason for exit/entry ', end='')
write_reason(driver, reason)
print('Done')
if habitation != '北京':
raise Exception('Entry applications from outside Beijing are not supported yet; please fill them in manually')
print('Selecting the district of residence ', end='')
select_district(driver, district)
print('Done')
print('Filling in the street of residence ', end='')
write_street(driver, street)
print('Done')
click_inPeking(driver)
click_check(driver)
submit(driver)
print('入校备案填报完毕!')
def run(driver, username, password, campus, reason_in, reason_out, destination, track,
habitation, district, street):
login(driver, username, password)
print('=================================')
go_to_application_out(driver)
fill_out(driver, campus, reason_out, destination, track)
print('=================================')
go_to_application_in(driver)
fill_in(driver, campus, reason_in, habitation, district, street)
print('=================================')
print('All done. Enjoy!')
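# Example invocation (placeholder credentials):
#   python main.py -u 1800012345 -p my_password --campus 燕园 --district 海淀区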
if __name__ == '__main__':
parser = ArgumentParser()
parser.add_argument('--username', '-u', type=str, help='Username')
parser.add_argument('--password', '-p', type=str, help='Password')
parser.add_argument('--campus', type=str, help='Campus: 燕园, 万柳, 畅春园, 圆明园 or 中关新园', default='燕园')
parser.add_argument('--reason_out', type=str, help='Reason for leaving campus, e.g. 吃饭', default='回家')
parser.add_argument('--reason_in', type=str, help='Reason for entering campus, e.g. 学习', default='学习')
parser.add_argument('--destination', type=str, help='Destination after leaving campus, e.g. 北京', default='北京')
parser.add_argument('--track', type=str, help='Movement track off campus, e.g. 畅春园食堂', default='燕园大厦东门-上地街道')
parser.add_argument('--habitation', type=str, help='Residence before entering campus, e.g. 北京', default='北京')
parser.add_argument('--district', type=str, help='District of residence, e.g. 海淀区', default='海淀区')
parser.add_argument('--street', type=str, help='Street of residence, e.g. 上地街道', default='上地街道')
args = parser.parse_args()
args_public = copy.deepcopy(args)
args_public.password = 'xxxxxxxx'
print('Arguments: {}'.format(args_public))
print('Driver Launching...')
# driver = Firefox()
# driver = Chrome()
if sys.platform == 'darwin': # macOS
phantomjs_path = os.path.join('phantomjs', 'phantomjs-darwin')
elif sys.platform == 'linux': # linux
phantomjs_path = os.path.join('phantomjs', 'phantomjs-linux-x86_64')
else: # windows
phantomjs_path = os.path.join('phantomjs', 'phantomjs-windows.exe')
driver = PhantomJS(executable_path=phantomjs_path)
run(driver, args.username, args.password, args.campus, args.reason_in, args.reason_out,
args.destination, args.track, args.habitation, args.district,
args.street)
driver.close()
| ||
grpc_client_test.go | package discovery
import (
"context"
"errors"
"testing"
"github.com/childoftheuniverse/etcd-discovery/testing/mock_etcd"
"github.com/childoftheuniverse/etcd-discovery/testing/mock_grpc"
"github.com/coreos/etcd/clientv3"
"github.com/coreos/etcd/mvcc/mvccpb"
"github.com/golang/mock/gomock"
"github.com/golang/protobuf/proto"
"google.golang.org/grpc"
)
func TestNewGrpcClient_PartialPath_Success(t *testing.T) |
func TestNewGrpcClient_FullPath_Success(t *testing.T) {
controller := gomock.NewController(t)
oldGrpcDialContext := grpcDialContext // save the current dial hook for restore
mockDialer := mock_grpc.NewMockDialer(controller)
mockKV := mock_etcd.NewMockKV(controller)
grpcDialContext = mockDialer.DialContext
defer func() { grpcDialContext = oldGrpcDialContext }()
ctx := context.TODO()
service := ExportedServiceRecord{
Protocol: "test",
Address: "localhost",
Port: 423,
}
pbData, err := proto.Marshal(&service)
if err != nil {
t.Error("Unable to encode test data: ", err)
}
mockKV.EXPECT().Get(ctx, "/this/is/a/full/path", gomock.Any()).Return(
&clientv3.GetResponse{
Kvs: []*mvccpb.KeyValue{
&mvccpb.KeyValue{
Key: []byte("/this/is/a/full/path"),
Value: pbData,
},
},
Count: 1,
}, nil)
mockDialer.EXPECT().DialContext(ctx, "localhost:423").Return(
&grpc.ClientConn{}, nil)
conn, err := NewGrpcClient(ctx, mockKV, "/this/is/a/full/path")
if err != nil {
t.Error("Received unexpected error from NewGrpcClient: ", err)
}
if conn == nil {
t.Error("Received nil connection")
}
}
func TestNewGrpcClient_FullPathWithTrailingSlash(t *testing.T) {
controller := gomock.NewController(t)
oldGrpcDialContext := grpcDialContext
mockDialer := mock_grpc.NewMockDialer(controller)
mockKV := mock_etcd.NewMockKV(controller)
grpcDialContext = mockDialer.DialContext
defer func() { grpcDialContext = oldGrpcDialContext }()
ctx := context.TODO()
service := ExportedServiceRecord{
Protocol: "test",
Address: "localhost",
Port: 423,
}
pbData, err := proto.Marshal(&service)
if err != nil {
t.Error("Unable to encode test data: ", err)
}
mockKV.EXPECT().Get(ctx, "/this/is/a/full/", gomock.Any()).Return(
&clientv3.GetResponse{
Kvs: []*mvccpb.KeyValue{
&mvccpb.KeyValue{
Key: []byte("/this/is/a/full/path"),
Value: pbData,
},
},
Count: 1,
}, nil)
mockDialer.EXPECT().DialContext(ctx, "localhost:423").Return(
&grpc.ClientConn{}, nil)
conn, err := NewGrpcClient(ctx, mockKV, "/this/is/a/full/")
if err != nil {
t.Error("Received unexpected error from NewGrpcClient: ", err)
}
if conn == nil {
t.Error("Received nil connection")
}
}
func TestNewGrpcClient_MultiplePaths(t *testing.T) {
controller := gomock.NewController(t)
oldGrpcDialContext := grpcDialContext
mockDialer := mock_grpc.NewMockDialer(controller)
mockKV := mock_etcd.NewMockKV(controller)
grpcDialContext = mockDialer.DialContext
defer func() { grpcDialContext = oldGrpcDialContext }()
ctx := context.TODO()
service1 := ExportedServiceRecord{
Protocol: "test",
Address: "localhost",
Port: 423,
}
pbData1, err := proto.Marshal(&service1)
if err != nil {
t.Error("Unable to encode test data: ", err)
}
service2 := ExportedServiceRecord{
Protocol: "test",
Address: "localhost",
Port: 345,
}
pbData2, err := proto.Marshal(&service2)
if err != nil {
t.Error("Unable to encode test data: ", err)
}
mockKV.EXPECT().Get(ctx, "/ns/service/test", gomock.Any()).Return(
&clientv3.GetResponse{
Kvs: []*mvccpb.KeyValue{
&mvccpb.KeyValue{
Key: []byte("/ns/service/test/a"),
Value: pbData1,
},
&mvccpb.KeyValue{
Key: []byte("/ns/service/test/b"),
Value: pbData2,
},
},
Count: 2,
}, nil)
mockDialer.EXPECT().DialContext(ctx, "localhost:423").Return(
nil, errors.New("That didn't work bruh"))
mockDialer.EXPECT().DialContext(ctx, "localhost:345").Return(
&grpc.ClientConn{}, nil)
conn, err := NewGrpcClient(ctx, mockKV, "/ns/service/test")
if err != nil {
t.Error("Received unexpected error from NewGrpcClient: ", err)
}
if conn == nil {
t.Error("Received nil connection")
}
}
func TestNewGrpcClient_EtcdFails(t *testing.T) {
controller := gomock.NewController(t)
oldGrpcDialContext := grpcDialContext
mockDialer := mock_grpc.NewMockDialer(controller)
mockKV := mock_etcd.NewMockKV(controller)
grpcDialContext = mockDialer.DialContext
defer func() { grpcDialContext = oldGrpcDialContext }()
ctx := context.TODO()
mockKV.EXPECT().Get(ctx, "/ns/service/test", gomock.Any()).Return(
nil, errors.New("etcd is sad"))
conn, err := NewGrpcClient(ctx, mockKV, "test")
if err == nil {
t.Error("NewGrpcClient succeeds despite etcd errors?")
}
if conn != nil {
t.Error("Received non-nil connection")
}
}
func TestNewGrpcClient_EtcdContentsGarbled(t *testing.T) {
controller := gomock.NewController(t)
oldGrpcDialContext := grpcDialContext
mockDialer := mock_grpc.NewMockDialer(controller)
mockKV := mock_etcd.NewMockKV(controller)
grpcDialContext = mockDialer.DialContext
defer func() { grpcDialContext = oldGrpcDialContext }()
ctx := context.TODO()
mockKV.EXPECT().Get(ctx, "/ns/service/test", gomock.Any()).Return(
&clientv3.GetResponse{
Kvs: []*mvccpb.KeyValue{
&mvccpb.KeyValue{
Key: []byte("/ns/service/test"),
Value: []byte("Whazzup bruh"),
},
},
Count: 1,
}, nil)
conn, err := NewGrpcClient(ctx, mockKV, "test")
if err == nil {
t.Error("NewGrpcClient succeeds despite expected decoding errors?")
}
if conn != nil {
t.Error("Received non-nil connection")
}
}
func TestNewGrpcClient_EtcdNoResults(t *testing.T) {
controller := gomock.NewController(t)
oldGrpcDialContext := grpcDialContext
mockDialer := mock_grpc.NewMockDialer(controller)
mockKV := mock_etcd.NewMockKV(controller)
grpcDialContext = mockDialer.DialContext
defer func() { grpcDialContext = oldGrpcDialContext }()
ctx := context.TODO()
mockKV.EXPECT().Get(ctx, "/ns/service/test", gomock.Any()).Return(
&clientv3.GetResponse{
Kvs: []*mvccpb.KeyValue{},
Count: 1,
}, nil)
conn, err := NewGrpcClient(ctx, mockKV, "test")
if err == nil {
t.Error("NewGrpcClient succeeds despite expected decoding errors?")
}
if conn != nil {
t.Error("Received non-nil connection")
}
}
func TestNewGrpcClient_ConnectionFailed(t *testing.T) {
controller := gomock.NewController(t)
oldGrpcDialContext := grpcDialContext
mockDialer := mock_grpc.NewMockDialer(controller)
mockKV := mock_etcd.NewMockKV(controller)
grpcDialContext = mockDialer.DialContext
defer func() { grpcDialContext = oldGrpcDialContext }()
ctx := context.TODO()
service := ExportedServiceRecord{
Protocol: "test",
Address: "localhost",
Port: 423,
}
pbData, err := proto.Marshal(&service)
if err != nil {
t.Error("Unable to encode test data: ", err)
}
mockKV.EXPECT().Get(ctx, "/ns/service/test", gomock.Any()).Return(
&clientv3.GetResponse{
Kvs: []*mvccpb.KeyValue{
&mvccpb.KeyValue{
Key: []byte("/ns/service/test"),
Value: pbData,
},
},
Count: 1,
}, nil)
mockDialer.EXPECT().DialContext(ctx, "localhost:423").Return(
nil, errors.New("Sad, sad network"))
conn, err := NewGrpcClient(ctx, mockKV, "test")
if err == nil {
t.Error("NewGrpcClient succeeds despite expected connection errors?")
}
if conn != nil {
t.Error("Received non-nil connection")
}
}
| {
controller := gomock.NewController(t)
oldGrpcDialContext := grpcDialContext
mockDialer := mock_grpc.NewMockDialer(controller)
mockKV := mock_etcd.NewMockKV(controller)
grpcDialContext = mockDialer.DialContext
defer func() { grpcDialContext = oldGrpcDialContext }()
ctx := context.TODO()
service := ExportedServiceRecord{
Protocol: "test",
Address: "localhost",
Port: 423,
}
pbData, err := proto.Marshal(&service)
if err != nil {
t.Error("Unable to encode test data: ", err)
}
mockKV.EXPECT().Get(ctx, "/ns/service/test", gomock.Any()).Return(
&clientv3.GetResponse{
Kvs: []*mvccpb.KeyValue{
&mvccpb.KeyValue{
Key: []byte("/ns/service/test"),
Value: pbData,
},
},
Count: 1,
}, nil)
mockDialer.EXPECT().DialContext(ctx, "localhost:423").Return(
&grpc.ClientConn{}, nil)
conn, err := NewGrpcClient(ctx, mockKV, "test")
if err != nil {
t.Error("Received unexpected error from NewGrpcClient: ", err)
}
if conn == nil {
t.Error("Received nil connection")
}
} |
conn_tcp.go | package bi
import (
"encoding/binary"
"net"
)
// TCPConn wraps a *net.TCPConn and frames each message with a 4-byte big-endian length prefix.
type TCPConn struct {
conn *net.TCPConn
}
// NewTCPConn returns a TCPConn wrapping the given *net.TCPConn.
func NewTCPConn(conn *net.TCPConn) *TCPConn {
c := TCPConn{conn: conn}
return &c
}
// Close closes the underlying TCP connection.
func (conn *TCPConn) Close() {
conn.conn.Close()
}
// RemoteAddr returns the remote address as a string.
func (conn *TCPConn) RemoteAddr() string {
return conn.conn.RemoteAddr().String()
}
// Read reads one length-prefixed frame and returns its payload; a zero-length frame yields (nil, nil).
func (conn *TCPConn) Read() ([]byte, error) {
head := make([]byte, 4)
if err := conn.read2(head); nil != err {
return nil, err
}
bodySize := binary.BigEndian.Uint32(head)
if 0 == bodySize {
return nil, nil
}
data := make([]byte, bodySize)
if err := conn.read2(data); nil != err {
return nil, err
}
return data, nil
}
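// Wire format example: Write([]byte("hi")) puts the 6 bytes
// 0x00 0x00 0x00 0x02 'h' 'i' on the wire, and a zero-length frame is just
// 0x00 0x00 0x00 0x00, which Read reports as (nil, nil).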
func (conn *TCPConn) read2(data []byte) error {
didReadBytesTotal := 0
didReadBytes := 0
var err error
for {
if didReadBytesTotal == len(data) {
break
}
if didReadBytes, err = conn.conn.Read(data[didReadBytesTotal:]); nil != err {
// log.Println(err)
break
}
didReadBytesTotal += didReadBytes | //Write Write
func (conn *TCPConn) Write(data []byte) error {
var err error
head := make([]byte, 4)
if nil != data && 0 < len(data) {
binary.BigEndian.PutUint32(head, uint32(len(data)))
head = append(head, data...)
} else {
binary.BigEndian.PutUint32(head, 0)
}
err = conn.write(head)
return err
}
func (conn *TCPConn) write(data []byte) error {
var err error
didWriteBytes := 0
for 0 < len(data) {
didWriteBytes, err = conn.conn.Write(data)
if nil != err {
// log.Println(err)
break
}
data = data[didWriteBytes:]
}
return err
} | }
return err
}
|
frame.rs | use leema::code::Code;
use leema::lstr::Lstr;
use leema::reg::{Ireg, Reg};
use leema::rsrc;
use leema::val::{Env, Val};
use std::fmt::{self, Debug};
use std::mem;
use std::rc::Rc;
use std::sync::Arc;
use futures::sync::oneshot::Sender as FutureSender;
pub enum Parent
{
Null,
Caller(Rc<Code>, Box<Frame>, Reg),
// Fork(Arc<AtomicBool>, mpsc::Sender<Msg>),
Future(FutureSender<Val>, Val),
Repl(Val),
Main(Val),
}
impl Parent
{
pub fn new_main() -> Parent
{
Parent::Main(Val::Void)
}
pub fn new_future(dst: FutureSender<Val>) -> Parent
{
Parent::Future(dst, Val::Void)
}
pub fn set_result(&mut self, r: Val)
{
match self {
&mut Parent::Caller(_, ref mut pf, ref dst) => {
pf.e.set_reg(dst, r);
}
// &mut Parent::Fork(_, _) => {}
&mut Parent::Main(ref mut res) => {
*res = r;
}
&mut Parent::Repl(ref mut res) => {
*res = r;
}
&mut Parent::Null => {}
&mut Parent::Future(_, ref mut dst) => {
*dst = r;
}
}
}
}
impl Debug for Parent
{
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result
{
match self {
&Parent::Null => write!(f, "Parent::Null"),
&Parent::Caller(ref code, ref pf, ref dst) => {
write!(f, "Parent::Caller({:?}, {}, {:?})", dst, code, pf)
}
/*
&Parent::Fork(ref ready, _) => {
write!(f, "Parent::Fork({:?})", ready)
}
*/
&Parent::Repl(ref res) => write!(f, "Parent::Repl({:?})", res),
&Parent::Main(ref res) => write!(f, "Parent::Main({:?})", res),
&Parent::Future(_, ref res) => {
write!(f, "Parent::Future({:?})", res)
}
}
}
}
pub enum Event
{
Uneventful,
Call(Reg, i16, Lstr, Lstr, Val),
Fork,
FutureWait(Reg),
IOWait,
Iop((i64, i64), rsrc::IopAction, Vec<Val>),
// IoFuture(Box<future::Future<Item=(), Error=()>>),
Complete(bool),
Success,
Failure,
}
impl Event
{
pub fn success() -> Event
{
Event::Complete(true)
}
pub fn failure() -> Event
{
Event::Complete(false)
}
}
impl fmt::Debug for Event
{
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result
{
match self {
&Event::Uneventful => write!(f, "Uneventful"),
&Event::Call(ref r, line, ref cmod, ref cfunc, ref cargs) => {
write!(
f,
"Event::Call({:?}@{}, {}::{}, {:?})",
r, line, cmod, cfunc, cargs
)
}
&Event::Fork => write!(f, "Event::Fork"),
&Event::FutureWait(ref r) => write!(f, "Event::FutureWait({})", r),
&Event::IOWait => write!(f, "Event::IOWait"),
&Event::Iop(wrid, _, ref iopargs) => {
write!(f, "Event::Iop({:?}, f, {:?})", wrid, iopargs)
}
&Event::Complete(c) => write!(f, "Event::Complete({})", c),
&Event::Success => write!(f, "Event::Success"),
&Event::Failure => write!(f, "Event::Failure"),
}
}
}
impl PartialEq for Event
{
fn eq(&self, other: &Event) -> bool
{
match (self, other) {
(&Event::Uneventful, &Event::Uneventful) => true,
(
&Event::Call(ref r1, line1, ref m1, ref f1, ref a1),
&Event::Call(ref r2, line2, ref m2, ref f2, ref a2),
) => r1 == r2 && line1 == line2 && m1 == m2 && f1 == f2 && a1 == a2,
(&Event::Fork, &Event::Fork) => true,
(&Event::FutureWait(ref r1), &Event::FutureWait(ref r2)) => {
r1 == r2
}
(&Event::IOWait, &Event::IOWait) => true,
(&Event::Success, &Event::Success) => true,
(&Event::Failure, &Event::Failure) => true,
_ => false,
}
}
}
#[derive(Debug)]
pub enum FrameTraceDirection
{
CallUp,
FailHere,
ReturnDown,
}
#[derive(Debug)]
pub struct FrameTrace
{
// TODO: Implement this in leema later
direction: FrameTraceDirection,
function: Lstr,
line: i16,
parent: Option<Arc<FrameTrace>>,
}
impl FrameTrace
{
pub fn new_root() -> Arc<FrameTrace>
{
Arc::new(FrameTrace {
direction: FrameTraceDirection::CallUp,
function: Lstr::Sref("__init__"),
line: 0,
parent: None,
})
}
pub fn push_call(
parent: &Arc<FrameTrace>,
func: &Lstr,
line: i16,
) -> Arc<FrameTrace>
|
pub fn propagate_down(
trace: &Arc<FrameTrace>,
func: &Lstr,
line: i16,
) -> Arc<FrameTrace>
{
Arc::new(FrameTrace {
direction: FrameTraceDirection::ReturnDown,
function: func.clone(),
line,
parent: Some(trace.clone()),
})
}
pub fn fail_here(&self) -> Arc<FrameTrace>
{
Arc::new(FrameTrace {
direction: FrameTraceDirection::FailHere,
function: self.function.clone(),
line: self.line,
parent: self.parent.clone(),
})
}
}
impl fmt::Display for FrameTrace
{
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result
{
let dir = match self.direction {
FrameTraceDirection::CallUp => " >",
FrameTraceDirection::FailHere => "<>",
FrameTraceDirection::ReturnDown => "< ",
};
write!(f, "{} {}", dir, self.function).ok();
if self.line != 0 {
write!(f, ":{}", self.line).ok();
}
match self.parent {
None => writeln!(f),
Some(ref p) => write!(f, "\n{}", p),
}
}
}
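// A printed trace therefore reads top-down from the newest frame back to the
// root, e.g. (illustrative):
//
//     <> foo:12
//      > main:4
//      > __init__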
#[derive(Debug)]
pub struct Frame
{
pub parent: Parent,
pub module: Lstr,
pub function: Lstr,
pub trace: Arc<FrameTrace>,
pub e: Env,
pub pc: i32,
}
impl Frame
{
pub fn new_root(parent: Parent, module: Lstr, function: Lstr) -> Frame
{
let env = Env::new();
Frame {
parent,
trace: FrameTrace::new_root(),
module,
function,
e: env,
pc: 0,
}
}
/*
pub fn new_fork(f: &Frame, ready: &Arc<AtomicBool>, tx: mpsc::Sender<Msg>)
-> Frame
{
Frame{
parent: Parent::Fork(ready.clone(), tx),
name: f.name.clone(),
trace: f.trace.clone(),
e: f.e.clone(),
id: f.id,
pc: 0,
}
}
*/
pub fn set_parent(&mut self, p: Parent)
{
self.parent = p;
}
pub fn take_parent(&mut self) -> Parent
{
mem::replace(&mut self.parent, Parent::Null)
}
pub fn push_frame_trace(&self, line: i16) -> Arc<FrameTrace>
{
FrameTrace::push_call(&self.trace, &self.function, line)
}
pub fn take_env(&mut self) -> Env
{
let mut e = Env::new();
mem::swap(&mut e, &mut self.e);
e
}
/**
* handy accessor function when calling from rust native functions
*/
pub fn get_param(&self, p: i8) -> &Val
{
self.e.get_reg(&Reg::Param(Ireg::Reg(p)))
}
pub fn get_param_mut(&mut self, p: i8) -> &mut Val
{
self.e.get_reg_mut(&Reg::Param(Ireg::Reg(p)))
}
}
/*
impl Debug for Frame
{
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result
{
write!(f, "Frame({:?}, {:?}, {})",
self.parent, self.e, self.pc,
)
}
}
*/
/*
* fork the frame and frame state
* add it to the fresh queue
* jump current frame state past the fork block
fn execute_fork(curf: &mut Frame,
dst: &Reg, freg: &Reg, argreg: &Reg
) -> Event {
println!("execute_fork");
// args are empty for a fork
// create new frame
let e = Env::new();
// set current state to called
let (tx, rx) = mpsc::channel::<Val>();
let ready = Arc::new(AtomicBool::new(false));
let newf = Frame::new_fork(&curf, &ready, tx);
curf.e.set_reg(dst, Val::future(ready, rx));
curf.pc = curf.pc + 1;
Event::Fork
}
*/
/*
process_set
process
| \- base frames
| | \- call code
\--- fork
*/
| {
Arc::new(FrameTrace {
direction: FrameTraceDirection::CallUp,
function: func.clone(),
line,
parent: Some(parent.clone()),
})
} |
lib.rs | use bindgen::{Bindings, EnumVariation, RustTarget};
use serde_derive::Deserialize;
use std::collections::HashMap;
use std::error::Error;
use std::fmt;
use std::fs::{read_to_string, File};
use std::io::Read;
use std::path::Path;
#[derive(Debug)]
struct BindgenError;
impl fmt::Display for BindgenError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Failed to generate bindings")
}
}
impl Error for BindgenError {}
#[derive(Deserialize)]
struct | {
enums: HashMap<String, serde_json::Value>,
structs: HashMap<String, serde_json::Value>,
}
#[derive(Deserialize)]
struct DefinitionArg {
#[serde(rename = "type")]
type_: String,
}
#[derive(Deserialize)]
struct Definition {
#[serde(rename = "argsT")]
args_t: Vec<DefinitionArg>,
ov_cimguiname: String,
}
#[derive(Debug, Clone)]
struct Whitelist {
enums: Vec<String>,
structs: Vec<String>,
definitions: Vec<String>,
}
fn parse_whitelist<R: Read>(
structs_and_enums: R,
definitions: R,
) -> Result<Whitelist, serde_json::Error> {
let StructsAndEnums { enums, structs } = serde_json::from_reader(structs_and_enums)?;
let enums = enums.keys().cloned().collect();
let structs = structs.keys().cloned().collect();
let definitions: HashMap<String, Vec<Definition>> = serde_json::from_reader(definitions)?;
let definitions = definitions
.into_iter()
.flat_map(|(_, defs)| defs.into_iter())
.filter_map(|d| {
let uses_va_list = d.args_t.iter().any(|a| a.type_ == "va_list");
if uses_va_list {
None
} else {
Some(d.ov_cimguiname)
}
})
.collect();
Ok(Whitelist {
enums,
structs,
definitions,
})
}
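// For instance, a definition whose `argsT` contains an argument of type
// `va_list` (cimgui's printf-style *V variants) is filtered out of the
// whitelist here, while its fixed-argument counterpart is kept.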
pub fn generate_bindings<P: AsRef<Path>>(
path: &P,
wasm_import_name: Option<String>,
) -> Result<Bindings, Box<dyn Error>> {
let path = path.as_ref();
let structs_and_enums = File::open(path.join("structs_and_enums.json"))?;
let definitions = File::open(path.join("definitions.json"))?;
let header = read_to_string(path.join("cimgui.h"))?;
let whitelist = parse_whitelist(structs_and_enums, definitions)?;
let mut builder = bindgen::builder()
.raw_line("#![allow(non_upper_case_globals)]")
.raw_line("#![allow(non_camel_case_types)]")
.raw_line("#![allow(non_snake_case)]")
.raw_line("#![allow(clippy::all)]")
.header_contents("cimgui.h", &header)
.rust_target(RustTarget::Stable_1_40)
.default_enum_style(EnumVariation::Consts)
.size_t_is_usize(true)
.prepend_enum_name(false)
.generate_comments(false)
.layout_tests(true)
.derive_copy(true)
.derive_debug(true)
.derive_default(true)
.derive_hash(true)
.derive_partialeq(true)
.derive_eq(true)
.impl_debug(true)
.rustfmt_bindings(true)
.clang_arg("-DCIMGUI_DEFINE_ENUMS_AND_STRUCTS=1");
if let Some(name) = wasm_import_name {
builder = builder.wasm_import_module_name(name);
}
for e in whitelist.structs {
builder = builder.whitelist_type(format!("^{}", e));
}
for e in whitelist.enums {
builder = builder.whitelist_type(format!("^{}", e));
}
for e in whitelist.definitions {
builder = builder.whitelist_function(format!("^{}", e));
}
let bindings = builder.generate().map_err(|_| BindgenError)?;
Ok(bindings)
}
pub fn generate_impl_bindings<P: AsRef<Path>>(
path: &P,
wasm_import_name: Option<String>,
) -> Result<Bindings, Box<dyn Error>> {
let path = path.as_ref();
let structs_and_enums = File::open(path.join("structs_and_enums.json"))?;
let definitions = File::open(path.join("impl_definitions.json"))?;
let header = read_to_string(path.join("cimgui_impl.h"))?;
let whitelist = parse_whitelist(structs_and_enums, definitions)?;
let mut builder = bindgen::builder()
.raw_line("#![allow(non_upper_case_globals)]")
.raw_line("#![allow(non_camel_case_types)]")
.raw_line("#![allow(non_snake_case)]")
.raw_line("#![allow(clippy::all)]")
.header_contents("cimgui_impl.h", &header)
.rust_target(RustTarget::Stable_1_40)
.default_enum_style(EnumVariation::Consts)
.size_t_is_usize(true)
.prepend_enum_name(false)
.generate_comments(false)
.layout_tests(true)
.derive_copy(true)
.derive_debug(true)
.derive_default(true)
.derive_hash(true)
.derive_partialeq(true)
.derive_eq(true)
.impl_debug(true)
.rustfmt_bindings(true)
.clang_arg("-DCIMGUI_DEFINE_ENUMS_AND_STRUCTS=1");
if let Some(name) = wasm_import_name {
builder = builder.wasm_import_module_name(name);
}
for e in whitelist.structs {
builder = builder.whitelist_type(format!("^{}", e));
}
for e in whitelist.enums {
builder = builder.whitelist_type(format!("^{}", e));
}
for e in whitelist.definitions {
builder = builder.whitelist_function(format!("^{}", e));
}
let bindings = builder.generate().map_err(|_| BindgenError)?;
Ok(bindings)
}
| StructsAndEnums |
order_entry.py | # coding: utf-8
"""
Powerbot Server
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: 1.0.5
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class OrderEntry(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'side': 'str',
'prod': 'str',
'quantity': 'float',
'price': 'float',
'display_qty': 'int',
'contract_id': 'int',
'contract_name': 'str',
'cl_ordr_id': 'str',
'clearing_acct_type': 'str',
'ordr_exe_restriction': 'str',
'pre_arranged': 'bool',
'pre_arranged_acct': 'str',
'type': 'str',
'validity_res': 'str',
'state': 'str',
'validity_date': 'datetime',
'txt': 'str',
'ppd': 'int',
'dlvry_start': 'datetime',
'dlvry_end': 'datetime'
}
attribute_map = {
'side': 'side',
'prod': 'prod',
'quantity': 'quantity',
'price': 'price',
'display_qty': 'displayQty',
'contract_id': 'contractId',
'contract_name': 'contractName',
'cl_ordr_id': 'clOrdrId',
'clearing_acct_type': 'clearingAcctType',
'ordr_exe_restriction': 'ordrExeRestriction',
'pre_arranged': 'preArranged',
'pre_arranged_acct': 'preArrangedAcct',
'type': 'type',
'validity_res': 'validityRes',
'state': 'state',
'validity_date': 'validityDate',
'txt': 'txt',
'ppd': 'ppd',
'dlvry_start': 'dlvryStart',
'dlvry_end': 'dlvryEnd'
}
def __init__(self, side=None, prod=None, quantity=None, price=None, display_qty=None, contract_id=None, contract_name=None, cl_ordr_id=None, clearing_acct_type=None, ordr_exe_restriction='NON', pre_arranged=False, pre_arranged_acct=None, type='O', validity_res='GFS', state=None, validity_date=None, txt=None, ppd=None, dlvry_start=None, dlvry_end=None): # noqa: E501
"""OrderEntry - a model defined in Swagger""" # noqa: E501
self._side = None
self._prod = None
self._quantity = None
self._price = None
self._display_qty = None
self._contract_id = None
self._contract_name = None
self._cl_ordr_id = None
self._clearing_acct_type = None
self._ordr_exe_restriction = None
self._pre_arranged = None
self._pre_arranged_acct = None
self._type = None
self._validity_res = None
self._state = None
self._validity_date = None
self._txt = None
self._ppd = None
self._dlvry_start = None
self._dlvry_end = None
self.discriminator = None
if side is not None:
self.side = side
self.prod = prod
self.quantity = quantity
self.price = price
if display_qty is not None:
self.display_qty = display_qty
if contract_id is not None:
self.contract_id = contract_id
if contract_name is not None:
self.contract_name = contract_name
if cl_ordr_id is not None:
self.cl_ordr_id = cl_ordr_id
self.clearing_acct_type = clearing_acct_type
if ordr_exe_restriction is not None:
self.ordr_exe_restriction = ordr_exe_restriction
if pre_arranged is not None:
self.pre_arranged = pre_arranged
if pre_arranged_acct is not None:
self.pre_arranged_acct = pre_arranged_acct
if type is not None:
self.type = type
if validity_res is not None:
self.validity_res = validity_res
if state is not None:
self.state = state
if validity_date is not None:
self.validity_date = validity_date
if txt is not None:
self.txt = txt
if ppd is not None:
self.ppd = ppd
if dlvry_start is not None:
self.dlvry_start = dlvry_start
if dlvry_end is not None:
self.dlvry_end = dlvry_end
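# Illustrative construction (placeholder values, not a real product or account):
#   entry = OrderEntry(side='BUY', prod='some_product', quantity=10.0,
#                      price=42.5, clearing_acct_type='some_acct_type')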
@property
def side(self):
"""Gets the side of this OrderEntry. # noqa: E501
:return: The side of this OrderEntry. # noqa: E501
:rtype: str
"""
return self._side
@side.setter
def side(self, side):
"""Sets the side of this OrderEntry.
:param side: The side of this OrderEntry. # noqa: E501
:type: str
"""
allowed_values = ["SELL", "BUY"] # noqa: E501
if side not in allowed_values:
raise ValueError(
"Invalid value for `side` ({0}), must be one of {1}" # noqa: E501
.format(side, allowed_values)
)
self._side = side
@property
def prod(self):
"""Gets the prod of this OrderEntry. # noqa: E501
:return: The prod of this OrderEntry. # noqa: E501
:rtype: str
"""
return self._prod
@prod.setter
def prod(self, prod):
"""Sets the prod of this OrderEntry.
:param prod: The prod of this OrderEntry. # noqa: E501
:type: str
"""
if prod is None:
raise ValueError("Invalid value for `prod`, must not be `None`") # noqa: E501
self._prod = prod
@property
def quantity(self):
"""Gets the quantity of this OrderEntry. # noqa: E501
:return: The quantity of this OrderEntry. # noqa: E501
:rtype: float
"""
return self._quantity
@quantity.setter
def quantity(self, quantity):
"""Sets the quantity of this OrderEntry.
:param quantity: The quantity of this OrderEntry. # noqa: E501
:type: float
"""
if quantity is None:
raise ValueError("Invalid value for `quantity`, must not be `None`") # noqa: E501
self._quantity = quantity
@property
def price(self):
"""Gets the price of this OrderEntry. # noqa: E501
:return: The price of this OrderEntry. # noqa: E501
:rtype: float
"""
return self._price
@price.setter
def price(self, price):
"""Sets the price of this OrderEntry.
:param price: The price of this OrderEntry. # noqa: E501
:type: float
"""
if price is None:
raise ValueError("Invalid value for `price`, must not be `None`") # noqa: E501
self._price = price
@property
def display_qty(self):
"""Gets the display_qty of this OrderEntry. # noqa: E501
:return: The display_qty of this OrderEntry. # noqa: E501
:rtype: int
"""
return self._display_qty
@display_qty.setter
def display_qty(self, display_qty):
"""Sets the display_qty of this OrderEntry.
:param display_qty: The display_qty of this OrderEntry. # noqa: E501
:type: int
"""
self._display_qty = display_qty
@property
def contract_id(self):
"""Gets the contract_id of this OrderEntry. # noqa: E501
:return: The contract_id of this OrderEntry. # noqa: E501
:rtype: int
"""
return self._contract_id
@contract_id.setter
def contract_id(self, contract_id):
"""Sets the contract_id of this OrderEntry.
:param contract_id: The contract_id of this OrderEntry. # noqa: E501
:type: int
"""
self._contract_id = contract_id
@property
def contract_name(self):
"""Gets the contract_name of this OrderEntry. # noqa: E501
Set a contract name instead of the contractId, and an attempt is made to look up the contract via its name. If contractId is set, the contractName field is ignored. # noqa: E501
:return: The contract_name of this OrderEntry. # noqa: E501
:rtype: str
"""
return self._contract_name
@contract_name.setter
def contract_name(self, contract_name):
"""Sets the contract_name of this OrderEntry.
Set a contract name instead of the contractId, and the attempt is made to look up the contract via its name. If contractId is set, the contractName field is ignored.  # noqa: E501
:param contract_name: The contract_name of this OrderEntry. # noqa: E501
:type: str
"""
self._contract_name = contract_name
@property
def cl_ordr_id(self):
"""Gets the cl_ordr_id of this OrderEntry. # noqa: E501
:return: The cl_ordr_id of this OrderEntry. # noqa: E501
:rtype: str
"""
return self._cl_ordr_id
@cl_ordr_id.setter
def cl_ordr_id(self, cl_ordr_id):
"""Sets the cl_ordr_id of this OrderEntry.
:param cl_ordr_id: The cl_ordr_id of this OrderEntry. # noqa: E501
:type: str
"""
if cl_ordr_id is not None and len(cl_ordr_id) > 40:
raise ValueError("Invalid value for `cl_ordr_id`, length must be less than or equal to `40`") # noqa: E501
self._cl_ordr_id = cl_ordr_id
@property
def clearing_acct_type(self):
"""Gets the clearing_acct_type of this OrderEntry. # noqa: E501
:return: The clearing_acct_type of this OrderEntry. # noqa: E501
:rtype: str
"""
return self._clearing_acct_type
@clearing_acct_type.setter
def clearing_acct_type(self, clearing_acct_type):
"""Sets the clearing_acct_type of this OrderEntry.
:param clearing_acct_type: The clearing_acct_type of this OrderEntry. # noqa: E501
:type: str
"""
if clearing_acct_type is None:
raise ValueError("Invalid value for `clearing_acct_type`, must not be `None`") # noqa: E501
self._clearing_acct_type = clearing_acct_type
@property
def ordr_exe_restriction(self):
"""Gets the ordr_exe_restriction of this OrderEntry. # noqa: E501
:return: The ordr_exe_restriction of this OrderEntry. # noqa: E501
:rtype: str
"""
return self._ordr_exe_restriction
@ordr_exe_restriction.setter
def ordr_exe_restriction(self, ordr_exe_restriction):
"""Sets the ordr_exe_restriction of this OrderEntry.
:param ordr_exe_restriction: The ordr_exe_restriction of this OrderEntry. # noqa: E501
:type: str
"""
allowed_values = ["FOK", "IOC", "NON", "AON", "AU"] # noqa: E501
if ordr_exe_restriction not in allowed_values:
raise ValueError(
"Invalid value for `ordr_exe_restriction` ({0}), must be one of {1}" # noqa: E501
.format(ordr_exe_restriction, allowed_values)
)
self._ordr_exe_restriction = ordr_exe_restriction
@property
def pre_arranged(self):
"""Gets the pre_arranged of this OrderEntry. # noqa: E501
:return: The pre_arranged of this OrderEntry. # noqa: E501
:rtype: bool
"""
return self._pre_arranged
@pre_arranged.setter
def pre_arranged(self, pre_arranged):
"""Sets the pre_arranged of this OrderEntry.
:param pre_arranged: The pre_arranged of this OrderEntry. # noqa: E501
:type: bool
"""
self._pre_arranged = pre_arranged
@property
def pre_arranged_acct(self):
"""Gets the pre_arranged_acct of this OrderEntry. # noqa: E501
:return: The pre_arranged_acct of this OrderEntry. # noqa: E501
:rtype: str
"""
return self._pre_arranged_acct
@pre_arranged_acct.setter
def pre_arranged_acct(self, pre_arranged_acct):
"""Sets the pre_arranged_acct of this OrderEntry.
:param pre_arranged_acct: The pre_arranged_acct of this OrderEntry. # noqa: E501
:type: str
"""
self._pre_arranged_acct = pre_arranged_acct
@property
def type(self):
"""Gets the type of this OrderEntry. # noqa: E501
:return: The type of this OrderEntry. # noqa: E501
:rtype: str
"""
return self._type
@type.setter
def type(self, type):
"""Sets the type of this OrderEntry.
:param type: The type of this OrderEntry. # noqa: E501
:type: str
"""
allowed_values = ["B", "O", "I", "L", "S", "H", "C", "N", "E"] # noqa: E501
if type not in allowed_values:
raise ValueError(
"Invalid value for `type` ({0}), must be one of {1}" # noqa: E501
.format(type, allowed_values)
)
self._type = type
@property
def validity_res(self):
"""Gets the validity_res of this OrderEntry. # noqa: E501
:return: The validity_res of this OrderEntry. # noqa: E501
:rtype: str
"""
return self._validity_res
|
:param validity_res: The validity_res of this OrderEntry. # noqa: E501
:type: str
"""
allowed_values = ["GFS", "GTD", "NON"] # noqa: E501
if validity_res not in allowed_values:
raise ValueError(
"Invalid value for `validity_res` ({0}), must be one of {1}" # noqa: E501
.format(validity_res, allowed_values)
)
self._validity_res = validity_res
@property
def state(self):
"""Gets the state of this OrderEntry. # noqa: E501
:return: The state of this OrderEntry. # noqa: E501
:rtype: str
"""
return self._state
@state.setter
def state(self, state):
"""Sets the state of this OrderEntry.
:param state: The state of this OrderEntry. # noqa: E501
:type: str
"""
allowed_values = ["ACTI", "HIBE"] # noqa: E501
if state not in allowed_values:
raise ValueError(
"Invalid value for `state` ({0}), must be one of {1}" # noqa: E501
.format(state, allowed_values)
)
self._state = state
@property
def validity_date(self):
"""Gets the validity_date of this OrderEntry. # noqa: E501
:return: The validity_date of this OrderEntry. # noqa: E501
:rtype: datetime
"""
return self._validity_date
@validity_date.setter
def validity_date(self, validity_date):
"""Sets the validity_date of this OrderEntry.
:param validity_date: The validity_date of this OrderEntry. # noqa: E501
:type: datetime
"""
self._validity_date = validity_date
@property
def txt(self):
"""Gets the txt of this OrderEntry. # noqa: E501
:return: The txt of this OrderEntry. # noqa: E501
:rtype: str
"""
return self._txt
@txt.setter
def txt(self, txt):
"""Sets the txt of this OrderEntry.
:param txt: The txt of this OrderEntry. # noqa: E501
:type: str
"""
if txt is not None and len(txt) > 250:
raise ValueError("Invalid value for `txt`, length must be less than or equal to `250`") # noqa: E501
self._txt = txt
@property
def ppd(self):
"""Gets the ppd of this OrderEntry. # noqa: E501
:return: The ppd of this OrderEntry. # noqa: E501
:rtype: int
"""
return self._ppd
@ppd.setter
def ppd(self, ppd):
"""Sets the ppd of this OrderEntry.
:param ppd: The ppd of this OrderEntry. # noqa: E501
:type: int
"""
self._ppd = ppd
@property
def dlvry_start(self):
"""Gets the dlvry_start of this OrderEntry. # noqa: E501
:return: The dlvry_start of this OrderEntry. # noqa: E501
:rtype: datetime
"""
return self._dlvry_start
@dlvry_start.setter
def dlvry_start(self, dlvry_start):
"""Sets the dlvry_start of this OrderEntry.
:param dlvry_start: The dlvry_start of this OrderEntry. # noqa: E501
:type: datetime
"""
self._dlvry_start = dlvry_start
@property
def dlvry_end(self):
"""Gets the dlvry_end of this OrderEntry. # noqa: E501
:return: The dlvry_end of this OrderEntry. # noqa: E501
:rtype: datetime
"""
return self._dlvry_end
@dlvry_end.setter
def dlvry_end(self, dlvry_end):
"""Sets the dlvry_end of this OrderEntry.
:param dlvry_end: The dlvry_end of this OrderEntry. # noqa: E501
:type: datetime
"""
self._dlvry_end = dlvry_end
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, OrderEntry):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other | @validity_res.setter
def validity_res(self, validity_res):
"""Sets the validity_res of this OrderEntry.
|
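# --- Illustrative usage (editor's sketch, not part of the original model) ---
# The import path below is hypothetical; field names and allowed values are
# taken from the setters above, and "P" is only a placeholder account type.
#
# from swagger_client.models.order_entry import OrderEntry  # hypothetical path
#
# entry = OrderEntry(side="BUY", prod="power", quantity=10.0, price=42.5,
#                    clearing_acct_type="P")  # required fields may not be None
# entry.ordr_exe_restriction = "NON"   # one of ["FOK", "IOC", "NON", "AON", "AU"]
# print(entry.to_dict())               # nested models are serialized recursively
# entry.side = "HOLD"                  # raises ValueError: not in ["SELL", "BUY"]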
fixed_text_rnn_classification_test.py | # coding=utf-8
# Copyright 2020 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for task_set.tasks.fixed_text_rnn_classification."""
from absl.testing import parameterized
from task_set import registry
from task_set.tasks import family_test_utils
from task_set.tasks.fixed import fixed_text_rnn_classification # pylint: disable=unused-import
import tensorflow.compat.v1 as tf
class FixedTextRNNClassificationTest(family_test_utils.SingleTaskTestCase):
def test_right_number_of_tasks(self):
task_names = registry.task_registry.get_all_fixed_config_names()
self.assertLen(task_names, 12)
@parameterized.parameters(registry.task_registry.get_all_fixed_config_names())
def test_tasks(self, task_name):
|
if __name__ == "__main__":
tf.test.main()
| self.task_test(registry.task_registry.get_instance(task_name)) |
fake_http_client.go | package fakes
import (
"net/http"
boshhttp "github.com/cloudfoundry/bosh-agent/http"
)
type doInput struct {
req *http.Request
}
type doOutput struct {
resp *http.Response
err error
}
type FakeClient struct {
StatusCode int
CallCount int
Error error
returnNilResponse bool
RequestBodies []string
Requests []*http.Request
responseMessage string
doBehavior []doOutput
}
func NewFakeClient() (fakeClient *FakeClient) {
fakeClient = &FakeClient{}
return
}
func (c *FakeClient) SetMessage(message string) {
c.responseMessage = message
}
func (c *FakeClient) SetNilResponse() {
c.returnNilResponse = true
}
func (c *FakeClient) Do(req *http.Request) (*http.Response, error) {
c.CallCount++
if req.Body != nil {
content, err := boshhttp.ReadAndClose(req.Body)
if err != nil {
return nil, err
}
c.RequestBodies = append(c.RequestBodies, string(content))
}
c.Requests = append(c.Requests, req)
if len(c.doBehavior) > 0 |
var resp *http.Response
if !c.returnNilResponse {
resp = &http.Response{
Body: boshhttp.NewStringReadCloser(c.responseMessage),
StatusCode: c.StatusCode,
}
}
err := c.Error
return resp, err
}
func (c *FakeClient) AddDoBehavior(resp *http.Response, err error) {
c.doBehavior = append(c.doBehavior, doOutput{resp: resp, err: err})
}
| {
output := c.doBehavior[0]
c.doBehavior = c.doBehavior[1:]
return output.resp, output.err
} |
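// --- Illustrative usage (editor's sketch, not part of the original file) ---
// Do() consumes AddDoBehavior entries as a FIFO queue before falling back to
// the global StatusCode/responseMessage, so a test can script a retry path:
//
// client := NewFakeClient()
// client.AddDoBehavior(&http.Response{StatusCode: 500, Body: boshhttp.NewStringReadCloser("")}, nil)
// client.AddDoBehavior(&http.Response{StatusCode: 200, Body: boshhttp.NewStringReadCloser("ok")}, nil)
// req, _ := http.NewRequest("GET", "http://example.com", nil)
// resp1, _ := client.Do(req) // -> 500 (first queued entry)
// resp2, _ := client.Do(req) // -> 200; client.CallCount == 2, client.Requests holds both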
hello_world_grpc.pb.go | // Code generated by protoc-gen-go-grpc. DO NOT EDIT.
// versions:
// - protoc-gen-go-grpc v1.2.0
// - protoc v3.18.0
// source: helloworld/hello_world.proto
package hello_world
import (
context "context"
grpc "google.golang.org/grpc"
codes "google.golang.org/grpc/codes"
status "google.golang.org/grpc/status"
)
// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
// Requires gRPC-Go v1.32.0 or later.
const _ = grpc.SupportPackageIsVersion7
// GreeterClient is the client API for Greeter service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream.
type GreeterClient interface {
// Sends a greeting
SayHello(ctx context.Context, in *HelloRequest, opts ...grpc.CallOption) (*HelloReply, error)
}
type greeterClient struct {
cc grpc.ClientConnInterface
}
func NewGreeterClient(cc grpc.ClientConnInterface) GreeterClient {
return &greeterClient{cc}
}
func (c *greeterClient) SayHello(ctx context.Context, in *HelloRequest, opts ...grpc.CallOption) (*HelloReply, error) {
out := new(HelloReply)
err := c.cc.Invoke(ctx, "/helloworld.Greeter/SayHello", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
// GreeterServer is the server API for Greeter service.
// All implementations should embed UnimplementedGreeterServer
// for forward compatibility
type GreeterServer interface {
// Sends a greeting
SayHello(context.Context, *HelloRequest) (*HelloReply, error)
}
// UnimplementedGreeterServer should be embedded to have forward compatible implementations.
type UnimplementedGreeterServer struct {
}
func (UnimplementedGreeterServer) SayHello(context.Context, *HelloRequest) (*HelloReply, error) {
return nil, status.Errorf(codes.Unimplemented, "method SayHello not implemented") | // UnsafeGreeterServer may be embedded to opt out of forward compatibility for this service.
// Use of this interface is not recommended, as added methods to GreeterServer will
// result in compilation errors.
type UnsafeGreeterServer interface {
mustEmbedUnimplementedGreeterServer()
}
func RegisterGreeterServer(s grpc.ServiceRegistrar, srv GreeterServer) {
s.RegisterService(&Greeter_ServiceDesc, srv)
}
func _Greeter_SayHello_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(HelloRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(GreeterServer).SayHello(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/helloworld.Greeter/SayHello",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(GreeterServer).SayHello(ctx, req.(*HelloRequest))
}
return interceptor(ctx, in, info, handler)
}
// Greeter_ServiceDesc is the grpc.ServiceDesc for Greeter service.
// It's only intended for direct use with grpc.RegisterService,
// and not to be introspected or modified (even as a copy)
var Greeter_ServiceDesc = grpc.ServiceDesc{
ServiceName: "helloworld.Greeter",
HandlerType: (*GreeterServer)(nil),
Methods: []grpc.MethodDesc{
{
MethodName: "SayHello",
Handler: _Greeter_SayHello_Handler,
},
},
Streams: []grpc.StreamDesc{},
Metadata: "helloworld/hello_world.proto",
} | }
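// --- Illustrative usage (editor's sketch, not part of the generated file) ---
// A concrete server embeds UnimplementedGreeterServer for forward
// compatibility and overrides SayHello; the HelloRequest/HelloReply field
// names below are assumed from the usual helloworld proto.
//
// type greeter struct {
//     UnimplementedGreeterServer
// }
//
// func (greeter) SayHello(ctx context.Context, in *HelloRequest) (*HelloReply, error) {
//     return &HelloReply{Message: "Hello " + in.GetName()}, nil // assumed fields
// }
//
// srv := grpc.NewServer()
// RegisterGreeterServer(srv, greeter{})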
|
test_gcrotmk.py | #!/usr/bin/env python
"""Tests for the linalg.isolve.gcrotmk module
"""
from numpy.testing import (assert_, assert_allclose, assert_equal,
suppress_warnings)
import numpy as np
from numpy import zeros, array, allclose
from scipy.linalg import norm
from scipy.sparse import csr_matrix, eye, rand
from scipy.sparse.linalg.interface import LinearOperator
from scipy.sparse.linalg import splu
from scipy.sparse.linalg.isolve import gcrotmk, gmres
Am = csr_matrix(array([[-2,1,0,0,0,9],
[1,-2,1,0,5,0],
[0,1,-2,1,0,0],
[0,0,1,-2,1,0],
[0,3,0,1,-2,1],
[1,0,0,0,1,-2]]))
b = array([1,2,3,4,5,6])
count = [0]
def matvec(v):
count[0] += 1
return Am*v
A = LinearOperator(matvec=matvec, shape=Am.shape, dtype=Am.dtype)
def do_solve(**kw):
count[0] = 0
with suppress_warnings() as sup:
sup.filter(DeprecationWarning, ".*called without specifying.*")
x0, flag = gcrotmk(A, b, x0=zeros(A.shape[0]), tol=1e-14, **kw)
count_0 = count[0]
assert_(allclose(A*x0, b, rtol=1e-12, atol=1e-12), norm(A*x0-b))
return x0, count_0
class TestGCROTMK(object):
def test_preconditioner(self):
# Check that preconditioning works
pc = splu(Am.tocsc())
M = LinearOperator(matvec=pc.solve, shape=A.shape, dtype=A.dtype)
x0, count_0 = do_solve()
x1, count_1 = do_solve(M=M)
assert_equal(count_1, 3)
assert_(count_1 < count_0/2)
assert_(allclose(x1, x0, rtol=1e-14))
def test_arnoldi(self):
np.random.seed(1)
A = eye(2000) + rand(2000, 2000, density=5e-4)
b = np.random.rand(2000)
# The inner arnoldi should be equivalent to gmres
with suppress_warnings() as sup:
sup.filter(DeprecationWarning, ".*called without specifying.*")
x0, flag0 = gcrotmk(A, b, x0=zeros(A.shape[0]), m=15, k=0, maxiter=1)
x1, flag1 = gmres(A, b, x0=zeros(A.shape[0]), restart=15, maxiter=1)
assert_equal(flag0, 1)
assert_equal(flag1, 1)
assert np.linalg.norm(A.dot(x0) - b) > 1e-3
assert_allclose(x0, x1)
def test_cornercase(self):
np.random.seed(1234)
# Rounding error may prevent convergence with tol=0 --- ensure
# that the return values in this case are correct, and no
# exceptions are raised
for n in [3, 5, 10, 100]:
A = 2*eye(n)
| x, info = gcrotmk(A, b, maxiter=10)
assert_equal(info, 0)
assert_allclose(A.dot(x) - b, 0, atol=1e-14)
x, info = gcrotmk(A, b, tol=0, maxiter=10)
if info == 0:
assert_allclose(A.dot(x) - b, 0, atol=1e-14)
b = np.random.rand(n)
x, info = gcrotmk(A, b, maxiter=10)
assert_equal(info, 0)
assert_allclose(A.dot(x) - b, 0, atol=1e-14)
x, info = gcrotmk(A, b, tol=0, maxiter=10)
if info == 0:
assert_allclose(A.dot(x) - b, 0, atol=1e-14)
def test_nans(self):
A = eye(3, format='lil')
A[1,1] = np.nan
b = np.ones(3)
with suppress_warnings() as sup:
sup.filter(DeprecationWarning, ".*called without specifying.*")
x, info = gcrotmk(A, b, tol=0, maxiter=10)
assert_equal(info, 1)
def test_truncate(self):
np.random.seed(1234)
A = np.random.rand(30, 30) + np.eye(30)
b = np.random.rand(30)
for truncate in ['oldest', 'smallest']:
with suppress_warnings() as sup:
sup.filter(DeprecationWarning, ".*called without specifying.*")
x, info = gcrotmk(A, b, m=10, k=10, truncate=truncate, tol=1e-4,
maxiter=200)
assert_equal(info, 0)
assert_allclose(A.dot(x) - b, 0, atol=1e-3)
def test_CU(self):
for discard_C in (True, False):
# Check that C,U behave as expected
CU = []
x0, count_0 = do_solve(CU=CU, discard_C=discard_C)
assert_(len(CU) > 0)
assert_(len(CU) <= 6)
if discard_C:
for c, u in CU:
assert_(c is None)
# should converge immediately
x1, count_1 = do_solve(CU=CU, discard_C=discard_C)
if discard_C:
assert_equal(count_1, 2 + len(CU))
else:
assert_equal(count_1, 3)
assert_(count_1 <= count_0/2)
assert_allclose(x1, x0, atol=1e-14)
def test_denormals(self):
# Check that no warnings are emitted if the matrix contains
# numbers for which 1/x has no float representation, and that
# the solver behaves properly.
A = np.array([[1, 2], [3, 4]], dtype=float)
A *= 100 * np.nextafter(0, 1)
b = np.array([1, 1])
with suppress_warnings() as sup:
sup.filter(DeprecationWarning, ".*called without specifying.*")
xp, info = gcrotmk(A, b)
if info == 0:
assert_allclose(A.dot(xp), b) | with suppress_warnings() as sup:
sup.filter(DeprecationWarning, ".*called without specifying.*")
b = np.ones(n) |
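# --- Illustrative standalone call (editor's sketch, not part of the test file) ---
# Mirrors what do_solve() exercises above, outside the test harness:
#
# from scipy.sparse.linalg import gcrotmk
# x, info = gcrotmk(Am, b, x0=zeros(Am.shape[0]), tol=1e-12)
# # info == 0 signals convergence, so Am @ x approximates b to the given tol.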
problem03.py | def divide(num):
for i in xrange(2, num/2 + 1) :  # include num/2 itself so e.g. divide(4) finds 2
if(num%i == 0) :
return i, num/i
return -1, num
def soinsu(num) :
insu = []
if num < 2 :
return num
current = divide(num)
'''print current''' | while current[0] != -1 :
insu.append(current[0])
current = divide(current[1])
print current
insu.append(current[1])
return insu
print soinsu(600851475143) | |
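# Editor's note: with the loop bound fixed above (num/2 + 1), soinsu returns the
# prime factors in non-decreasing order, e.g. soinsu(13195) -> [5, 7, 13, 29];
# the answer to the Euler problem is the last element. Bounding trial division
# at int(num**0.5) + 1 (and treating a divisor-free num as prime) would be far
# faster for 600851475143.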
endpoint.py | from lithopscloud.modules.gen2.endpoint import EndpointConfig
from typing import Any, Dict
from lithopscloud.modules.utils import get_region_by_endpoint
class RayEndpointConfig(EndpointConfig):
def __init__(self, base_config: Dict[str, Any]) -> None:
super().__init__(base_config)
base_endpoint = self.base_config['provider'].get('endpoint')
self.defaults['region'] = get_region_by_endpoint(base_endpoint) if base_endpoint else None
def update_config(self, endpoint):
| self.base_config['provider']['endpoint'] = endpoint
self.base_config['provider']['region'] = get_region_by_endpoint(endpoint) |
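# --- Illustrative usage (editor's sketch, not part of the original file) ---
# Assuming a provider config dict in the Ray cluster-config shape, update_config
# keeps the region consistent with the chosen endpoint (URLs are hypothetical):
#
# cfg = {'provider': {'endpoint': 'https://us-south.iaas.cloud.ibm.com/v1'}}
# ec = RayEndpointConfig(cfg)
# ec.update_config('https://eu-de.iaas.cloud.ibm.com/v1')
# # cfg['provider']['region'] is now whatever get_region_by_endpoint() derives.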
|
default.rs | use anyhow::{anyhow, Context as _};
use cid::Cid;
use fvm_shared::address::Address;
use fvm_shared::blockstore::{Blockstore, Buffered};
use fvm_shared::clock::ChainEpoch;
use fvm_shared::econ::TokenAmount;
use fvm_shared::error::ExitCode;
use fvm_shared::version::NetworkVersion;
use fvm_shared::ActorID;
use log::Level::Trace;
use log::{debug, log_enabled, trace};
use num_traits::Signed;
use wasmtime::{Engine, Module};
use super::{Machine, MachineContext};
use crate::blockstore::BufferedBlockstore;
use crate::externs::Externs;
use crate::gas::price_list_by_epoch;
use crate::init_actor::{State, INIT_ACTOR_ADDR};
use crate::kernel::{ClassifyResult, Context as _, Result};
use crate::state_tree::{ActorState, StateTree};
use crate::{syscall_error, Config};
/// The core of the FVM.
///
/// ## Generic types
/// * B => Blockstore.
/// * E => Externs.
/// * K => Kernel.
pub struct DefaultMachine<B, E> {
config: Config,
/// The context for the execution.
context: MachineContext,
/// The wasmtime engine is created on construction of the DefaultMachine, and
/// is dropped when the DefaultMachine is dropped.
engine: Engine,
/// Boundary A calls are handled through externs. These are calls from the
/// FVM to the Filecoin node.
externs: E,
/// The state tree. It is updated with the results from every message
/// execution as the call stack for every message concludes.
///
/// Owned.
state_tree: StateTree<BufferedBlockstore<B>>,
}
impl<B, E> DefaultMachine<B, E>
where
B: Blockstore + 'static,
E: Externs + 'static,
{
// ISSUE: #249
#[allow(clippy::too_many_arguments)]
pub fn new(
config: Config,
epoch: ChainEpoch,
base_fee: TokenAmount,
base_circ_supply: TokenAmount,
network_version: NetworkVersion,
state_root: Cid,
blockstore: B,
externs: E,
) -> anyhow::Result<Self> {
debug!(
"initializing a new machine, epoch={}, base_fee={}, nv={:?}, root={}",
epoch, &base_fee, network_version, state_root
);
let context = MachineContext {
epoch,
base_fee,
base_circ_supply,
network_version,
initial_state_root: state_root,
price_list: price_list_by_epoch(epoch),
debug: config.debug,
};
// Initialize the WASM engine.
let engine = Engine::new(&config.engine)?;
if !blockstore
.has(&context.initial_state_root)
.context("failed to load initial state-root")?
{
return Err(anyhow!(
"blockstore doesn't have the initial state-root {}",
&context.initial_state_root
));
}
let bstore = BufferedBlockstore::new(blockstore);
let state_tree = StateTree::new_from_root(bstore, &context.initial_state_root)?;
if log_enabled!(Trace) {
trace_actors(&state_tree);
}
Ok(DefaultMachine {
config,
context,
engine,
externs,
state_tree,
})
}
}
/// Print a trace of all actors and their state roots.
#[cold]
fn trace_actors<B: Blockstore>(state_tree: &StateTree<B>) {
trace!("init actor address: {}", INIT_ACTOR_ADDR.to_string());
state_tree
.for_each(|addr, actor_state| {
trace!(
"state tree: {} ({:?}): {:?}",
addr.to_string(),
addr.to_bytes(),
actor_state
);
Ok(())
})
.unwrap(); // This will never panic.
match State::load(state_tree) {
Ok((state, _)) => trace!("init actor: {:?}", state),
Err(err) => trace!("init actor: failed to load state; err={:?}", err),
}
}
impl<B, E> Machine for DefaultMachine<B, E>
where
B: Blockstore + 'static,
E: Externs + 'static,
{
type Blockstore = BufferedBlockstore<B>;
type Externs = E;
fn engine(&self) -> &Engine {
&self.engine
}
fn config(&self) -> &Config {
&self.config
}
fn blockstore(&self) -> &Self::Blockstore {
self.state_tree.store()
}
fn context(&self) -> &MachineContext {
&self.context
}
fn externs(&self) -> &Self::Externs {
&self.externs
}
fn state_tree(&self) -> &StateTree<Self::Blockstore> |
fn state_tree_mut(&mut self) -> &mut StateTree<Self::Blockstore> {
&mut self.state_tree
}
/// Flushes the state-tree and returns the new root CID.
///
/// This method also flushes all new blocks (reachable from this new root CID) from the write
/// buffer into the underlying blockstore (the blockstore with which the machine was
/// constructed).
fn flush(&mut self) -> Result<Cid> {
let root = self.state_tree_mut().flush()?;
self.blockstore().flush(&root).or_fatal()?;
Ok(root)
}
/// Creates an uninitialized actor.
// TODO: Remove
fn create_actor(&mut self, addr: &Address, act: ActorState) -> Result<ActorID> {
let state_tree = self.state_tree_mut();
let addr_id = state_tree
.register_new_address(addr)
.context("failed to register new address")
.or_fatal()?;
state_tree
.set_actor(&Address::new_id(addr_id), act)
.context("failed to set actor")
.or_fatal()?;
Ok(addr_id)
}
#[cfg(feature = "builtin_actors")]
fn load_module(&self, code: &Cid) -> Result<Module> {
use anyhow::Context;
// TODO: cache compiled code, and modules?
let binary = if code == &*crate::builtin::SYSTEM_ACTOR_CODE_ID {
fvm_actor_system::wasm::WASM_BINARY
} else if code == &*crate::builtin::INIT_ACTOR_CODE_ID {
fvm_actor_init::wasm::WASM_BINARY
} else if code == &*crate::builtin::CRON_ACTOR_CODE_ID {
fvm_actor_cron::wasm::WASM_BINARY
} else if code == &*crate::builtin::ACCOUNT_ACTOR_CODE_ID {
fvm_actor_account::wasm::WASM_BINARY
} else if code == &*crate::builtin::POWER_ACTOR_CODE_ID {
fvm_actor_power::wasm::WASM_BINARY
} else if code == &*crate::builtin::MINER_ACTOR_CODE_ID {
fvm_actor_miner::wasm::WASM_BINARY
} else if code == &*crate::builtin::MARKET_ACTOR_CODE_ID {
fvm_actor_market::wasm::WASM_BINARY
} else if code == &*crate::builtin::PAYCH_ACTOR_CODE_ID {
fvm_actor_paych::wasm::WASM_BINARY
} else if code == &*crate::builtin::MULTISIG_ACTOR_CODE_ID {
fvm_actor_multisig::wasm::WASM_BINARY
} else if code == &*crate::builtin::REWARD_ACTOR_CODE_ID {
fvm_actor_reward::wasm::WASM_BINARY
} else if code == &*crate::builtin::VERIFREG_ACTOR_CODE_ID {
fvm_actor_verifreg::wasm::WASM_BINARY
} else {
None
};
let binary = binary.context("missing wasm binary").or_fatal()?;
let module = Module::new(&self.engine, binary).or_fatal()?;
Ok(module)
}
#[cfg(not(feature = "builtin_actors"))]
fn load_module(&self, _code: &Cid) -> Result<Module> {
Err(crate::kernel::ExecutionError::Fatal(anyhow!(
"built-in actors not embedded; please run build enabling the builtin_actors feature"
)))
}
fn transfer(&mut self, from: ActorID, to: ActorID, value: &TokenAmount) -> Result<()> {
if from == to {
return Ok(());
}
if value.is_negative() {
return Err(syscall_error!(SysErrForbidden;
"attempted to transfer negative transfer value {}", value)
.into());
}
let mut from_actor = self
.state_tree
.get_actor_id(from)?
.context("cannot transfer from non-existent sender")
.or_error(ExitCode::SysErrSenderInvalid)?;
let mut to_actor = self
.state_tree
.get_actor_id(to)?
.context("cannot transfer to non-existent receiver")
.or_error(ExitCode::SysErrInvalidReceiver)?;
from_actor.deduct_funds(value).map_err(|e| {
syscall_error!(SysErrInsufficientFunds;
"transfer failed when deducting funds ({}) from balance ({}): {}",
value, &from_actor.balance, e)
})?;
to_actor.deposit_funds(value);
self.state_tree.set_actor_id(from, from_actor)?;
self.state_tree.set_actor_id(to, to_actor)?;
log::trace!("transferred {} from {} to {}", value, from, to);
Ok(())
}
fn consume(self) -> Self::Blockstore {
self.state_tree.consume()
}
}
| {
&self.state_tree
} |
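// --- Illustrative construction (editor's sketch, not part of the original file) ---
// DefaultMachine::new refuses to start unless the blockstore already holds the
// initial state root; a caller (types and values assumed) looks roughly like:
//
// let mut machine = DefaultMachine::new(
//     Config::default(),              // assumes Config implements Default
//     epoch, base_fee, base_circ_supply,
//     NetworkVersion::V14,            // hypothetical network version
//     state_root, blockstore, externs,
// )?;
// // ... apply messages against machine.state_tree_mut() ...
// let new_root = machine.flush()?;    // persists buffered blocks, returns the new CID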
photo.rs | pub static PHOTO: &str =
"+.::-============+++=+++=-----====+++++****++++=-:
=:=+**###########**+=--. .:--=+*####%%%%%%#####**
+-+*##%%%%%##+=:. :-+%%%%%%%%%##*
++*##%%%##+-. :=#%%%%%##+
++######*=. :+%%%##+
++####**= ... . .*%##+
++###**+ . : . ::. +*+*#+
+*#***+. :--=#%- :-. .:-===- -%%##+
**#**+-- :: .+*#%%%%+: . :-==:+-%%##*
**###**+..#- ++++*##%*: :.:: . :%*%%##+
+*#####*+ -####%%%%+:.-===*+. .=:#%%%%##+ | +:=+++++==:-- .. :==++=:++==+**************++=:"; | +*#######+==- =%%%%%%%%#%%%#**=. .:%%%%%%%%##+
++**-.:::::=*+:. :-+#%%%%%%#**=. :+%%=%%%%%%%%#*+
++++- ::: . .: --*%%%%%%%%%%%%##*+
**+=--==-. : -=-::=.+=**#*#%%%%%%%%%%%####*+ |
hyperparameter.py | import pandas as pd
import itertools
from functools import partial
from fastai.callbacks import CSVLogger
def get_config_df(config):
df = pd.DataFrame(list(itertools.product(*config.values())), columns=config.keys())
df.index = [f'model_{i+1}' for i in range(len(df))]
return df
def create_experiment(nm, path, folder='results'):
|
def record_experiment(learn, fn, exp_path):
learn.callback_fns.append(partial(CSVLogger, filename=exp_path/fn))
def load_results(exp_path):
config_df = pd.read_csv(exp_path/'config.csv', index_col=0)
param_names = config_df.columns.values
recorder_df=[]
for p in exp_path.ls():
if p.name.startswith(tuple(config_df.index.values)):
df = pd.read_csv(p)
ind_name, fold_name = p.stem.split('-')
df['index']=ind_name
df['fold']=int(fold_name.split('_')[-1].split('.')[0])
recorder_df.append(df)
recorder_df = pd.concat(recorder_df)
metric_names = list(set(recorder_df.columns).symmetric_difference(['index', 'epoch', 'train_loss', 'fold']))
recorder_df = recorder_df.merge(config_df.reset_index())
return config_df, recorder_df, param_names, metric_names
def summarise_results(recorder_df, param_names, metric_names):
return (recorder_df.groupby(['index', *param_names, 'epoch'], as_index=False)
.agg({k:['mean', 'std'] for k in metric_names})) | exp_path = (path/folder/nm)
exp_path.mkdir(exist_ok=True)
return nm, exp_path |
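# --- Illustrative workflow (editor's sketch, not part of the original file) ---
# `path` is assumed to be a pathlib.Path / fastai Path:
#
# nm, exp_path = create_experiment('grid_search', path)
# config_df = get_config_df({'lr': [1e-3, 1e-2], 'wd': [0.0, 0.1]})  # model_1..model_4
# config_df.to_csv(exp_path/'config.csv')               # load_results() reads this back
# record_experiment(learn, 'model_1-fold_0', exp_path)  # attach before learn.fit(...)
# # after training all models/folds:
# config_df, recorder_df, param_names, metric_names = load_results(exp_path)
# summary = summarise_results(recorder_df, param_names, metric_names)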
operations.rs | #![doc = "generated by AutoRust 0.1.0"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
use crate::models::*;
pub mod operations {
use crate::models::*;
pub async fn list(operation_config: &crate::OperationConfig) -> std::result::Result<OperationList, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/providers/Microsoft.SignalRService/operations", operation_config.base_path(),);
let mut url = url::Url::parse(url_str).map_err(|source| list::Error::ParseUrlError { source })?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(|source| list::Error::GetTokenError { source })?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(|source| list::Error::BuildRequestError { source })?;
let rsp = http_client
.execute_request(req)
.await
.map_err(|source| list::Error::ExecuteRequestError { source })?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: OperationList = serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError {
source,
body: rsp_body.clone(),
})?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(list::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod list {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {}", source)]
ParseUrlError { source: url::ParseError },
#[error("Failed to build request: {}", source)]
BuildRequestError { source: http::Error },
#[error("Failed to execute request: {}", source)]
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to serialize request body: {}", source)]
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to deserialize response body: {}", source)]
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
#[error("Failed to get access token: {}", source)]
GetTokenError { source: azure_core::errors::AzureError },
}
}
}
pub mod signal_r {
use crate::models::*;
pub async fn check_name_availability(
operation_config: &crate::OperationConfig,
location: &str,
parameters: Option<&NameAvailabilityParameters>,
subscription_id: &str,
) -> std::result::Result<NameAvailability, check_name_availability::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.SignalRService/locations/{}/checkNameAvailability",
operation_config.base_path(),
subscription_id,
location
);
let mut url = url::Url::parse(url_str).map_err(|source| check_name_availability::Error::ParseUrlError { source })?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(|source| check_name_availability::Error::GetTokenError { source })?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = if let Some(parameters) = parameters {
azure_core::to_json(parameters).map_err(|source| check_name_availability::Error::SerializeError { source })?
} else {
bytes::Bytes::from_static(azure_core::EMPTY_BODY)
};
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(|source| check_name_availability::Error::BuildRequestError { source })?;
let rsp = http_client
.execute_request(req)
.await
.map_err(|source| check_name_availability::Error::ExecuteRequestError { source })?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: NameAvailability =
serde_json::from_slice(rsp_body).map_err(|source| check_name_availability::Error::DeserializeError {
source,
body: rsp_body.clone(),
})?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(check_name_availability::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod check_name_availability {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {}", source)]
ParseUrlError { source: url::ParseError },
#[error("Failed to build request: {}", source)]
BuildRequestError { source: http::Error },
#[error("Failed to execute request: {}", source)]
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to serialize request body: {}", source)]
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to deserialize response body: {}", source)]
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
#[error("Failed to get access token: {}", source)]
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn list_by_subscription(
operation_config: &crate::OperationConfig,
subscription_id: &str,
) -> std::result::Result<SignalRResourceList, list_by_subscription::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.SignalRService/SignalR",
operation_config.base_path(),
subscription_id
);
let mut url = url::Url::parse(url_str).map_err(|source| list_by_subscription::Error::ParseUrlError { source })?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(|source| list_by_subscription::Error::GetTokenError { source })?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(|source| list_by_subscription::Error::BuildRequestError { source })?;
let rsp = http_client
.execute_request(req)
.await
.map_err(|source| list_by_subscription::Error::ExecuteRequestError { source })?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: SignalRResourceList =
serde_json::from_slice(rsp_body).map_err(|source| list_by_subscription::Error::DeserializeError {
source,
body: rsp_body.clone(),
})?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(list_by_subscription::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod list_by_subscription {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {}", source)]
ParseUrlError { source: url::ParseError },
#[error("Failed to build request: {}", source)]
BuildRequestError { source: http::Error },
#[error("Failed to execute request: {}", source)]
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to serialize request body: {}", source)]
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to deserialize response body: {}", source)]
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
#[error("Failed to get access token: {}", source)]
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn list_by_resource_group(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
) -> std::result::Result<SignalRResourceList, list_by_resource_group::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.SignalRService/SignalR",
operation_config.base_path(),
subscription_id,
resource_group_name
);
let mut url = url::Url::parse(url_str).map_err(|source| list_by_resource_group::Error::ParseUrlError { source })?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(|source| list_by_resource_group::Error::GetTokenError { source })?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(|source| list_by_resource_group::Error::BuildRequestError { source })?;
let rsp = http_client
.execute_request(req)
.await
.map_err(|source| list_by_resource_group::Error::ExecuteRequestError { source })?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: SignalRResourceList =
serde_json::from_slice(rsp_body).map_err(|source| list_by_resource_group::Error::DeserializeError {
source,
body: rsp_body.clone(),
})?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(list_by_resource_group::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod list_by_resource_group {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {}", source)]
ParseUrlError { source: url::ParseError },
#[error("Failed to build request: {}", source)]
BuildRequestError { source: http::Error },
#[error("Failed to execute request: {}", source)]
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to serialize request body: {}", source)]
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to deserialize response body: {}", source)]
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
#[error("Failed to get access token: {}", source)]
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn list_keys(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
resource_name: &str,
) -> std::result::Result<SignalRKeys, list_keys::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.SignalRService/SignalR/{}/listKeys",
operation_config.base_path(),
subscription_id,
resource_group_name,
resource_name
);
let mut url = url::Url::parse(url_str).map_err(|source| list_keys::Error::ParseUrlError { source })?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(|source| list_keys::Error::GetTokenError { source })?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(|source| list_keys::Error::BuildRequestError { source })?;
let rsp = http_client
.execute_request(req)
.await
.map_err(|source| list_keys::Error::ExecuteRequestError { source })?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: SignalRKeys = serde_json::from_slice(rsp_body).map_err(|source| list_keys::Error::DeserializeError {
source,
body: rsp_body.clone(),
})?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(list_keys::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod list_keys {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {}", source)]
ParseUrlError { source: url::ParseError },
#[error("Failed to build request: {}", source)]
BuildRequestError { source: http::Error },
#[error("Failed to execute request: {}", source)]
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to serialize request body: {}", source)]
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to deserialize response body: {}", source)]
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
#[error("Failed to get access token: {}", source)]
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn regenerate_key(
operation_config: &crate::OperationConfig,
parameters: Option<&RegenerateKeyParameters>,
subscription_id: &str,
resource_group_name: &str,
resource_name: &str,
) -> std::result::Result<SignalRKeys, regenerate_key::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.SignalRService/SignalR/{}/regenerateKey",
operation_config.base_path(),
subscription_id,
resource_group_name,
resource_name
);
let mut url = url::Url::parse(url_str).map_err(|source| regenerate_key::Error::ParseUrlError { source })?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(|source| regenerate_key::Error::GetTokenError { source })?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = if let Some(parameters) = parameters {
azure_core::to_json(parameters).map_err(|source| regenerate_key::Error::SerializeError { source })?
} else {
bytes::Bytes::from_static(azure_core::EMPTY_BODY)
};
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(|source| regenerate_key::Error::BuildRequestError { source })?;
let rsp = http_client
.execute_request(req)
.await
.map_err(|source| regenerate_key::Error::ExecuteRequestError { source })?;
match rsp.status() {
http::StatusCode::CREATED => {
let rsp_body = rsp.body();
let rsp_value: SignalRKeys =
serde_json::from_slice(rsp_body).map_err(|source| regenerate_key::Error::DeserializeError {
source,
body: rsp_body.clone(),
})?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(regenerate_key::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod regenerate_key {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {}", source)]
ParseUrlError { source: url::ParseError },
#[error("Failed to build request: {}", source)]
BuildRequestError { source: http::Error },
#[error("Failed to execute request: {}", source)]
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to serialize request body: {}", source)]
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to deserialize response body: {}", source)]
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
#[error("Failed to get access token: {}", source)]
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn | (
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
resource_name: &str,
) -> std::result::Result<SignalRResource, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.SignalRService/SignalR/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
resource_name
);
let mut url = url::Url::parse(url_str).map_err(|source| get::Error::ParseUrlError { source })?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(|source| get::Error::GetTokenError { source })?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(|source| get::Error::BuildRequestError { source })?;
let rsp = http_client
.execute_request(req)
.await
.map_err(|source| get::Error::ExecuteRequestError { source })?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: SignalRResource = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError {
source,
body: rsp_body.clone(),
})?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(get::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod get {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {}", source)]
ParseUrlError { source: url::ParseError },
#[error("Failed to build request: {}", source)]
BuildRequestError { source: http::Error },
#[error("Failed to execute request: {}", source)]
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to serialize request body: {}", source)]
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to deserialize response body: {}", source)]
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
#[error("Failed to get access token: {}", source)]
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn create_or_update(
operation_config: &crate::OperationConfig,
parameters: Option<&SignalRCreateParameters>,
subscription_id: &str,
resource_group_name: &str,
resource_name: &str,
) -> std::result::Result<create_or_update::Response, create_or_update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.SignalRService/SignalR/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
resource_name
);
let mut url = url::Url::parse(url_str).map_err(|source| create_or_update::Error::ParseUrlError { source })?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(|source| create_or_update::Error::GetTokenError { source })?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = if let Some(parameters) = parameters {
azure_core::to_json(parameters).map_err(|source| create_or_update::Error::SerializeError { source })?
} else {
bytes::Bytes::from_static(azure_core::EMPTY_BODY)
};
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(|source| create_or_update::Error::BuildRequestError { source })?;
let rsp = http_client
.execute_request(req)
.await
.map_err(|source| create_or_update::Error::ExecuteRequestError { source })?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: SignalRResource =
serde_json::from_slice(rsp_body).map_err(|source| create_or_update::Error::DeserializeError {
source,
body: rsp_body.clone(),
})?;
Ok(create_or_update::Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = rsp.body();
let rsp_value: SignalRResource =
serde_json::from_slice(rsp_body).map_err(|source| create_or_update::Error::DeserializeError {
source,
body: rsp_body.clone(),
})?;
Ok(create_or_update::Response::Created201(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(create_or_update::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
Err(create_or_update::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod create_or_update {
use crate::{models, models::*};
#[derive(Debug)]
pub enum Response {
Ok200(SignalRResource),
Created201(SignalRResource),
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {}", source)]
ParseUrlError { source: url::ParseError },
#[error("Failed to build request: {}", source)]
BuildRequestError { source: http::Error },
#[error("Failed to execute request: {}", source)]
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to serialize request body: {}", source)]
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to deserialize response body: {}", source)]
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
#[error("Failed to get access token: {}", source)]
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn update(
operation_config: &crate::OperationConfig,
parameters: Option<&SignalRUpdateParameters>,
subscription_id: &str,
resource_group_name: &str,
resource_name: &str,
) -> std::result::Result<update::Response, update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.SignalRService/SignalR/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
resource_name
);
let mut url = url::Url::parse(url_str).map_err(|source| update::Error::ParseUrlError { source })?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(|source| update::Error::GetTokenError { source })?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = if let Some(parameters) = parameters {
azure_core::to_json(parameters).map_err(|source| update::Error::SerializeError { source })?
} else {
bytes::Bytes::from_static(azure_core::EMPTY_BODY)
};
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(|source| update::Error::BuildRequestError { source })?;
let rsp = http_client
.execute_request(req)
.await
.map_err(|source| update::Error::ExecuteRequestError { source })?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: SignalRResource = serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError {
source,
body: rsp_body.clone(),
})?;
Ok(update::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(update::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
Err(update::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod update {
use crate::{models, models::*};
#[derive(Debug)]
pub enum Response {
Ok200(SignalRResource),
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {}", source)]
ParseUrlError { source: url::ParseError },
#[error("Failed to build request: {}", source)]
BuildRequestError { source: http::Error },
#[error("Failed to execute request: {}", source)]
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to serialize request body: {}", source)]
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to deserialize response body: {}", source)]
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
#[error("Failed to get access token: {}", source)]
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn delete(
operation_config: &crate::OperationConfig,
subscription_id: &str,
resource_group_name: &str,
resource_name: &str,
) -> std::result::Result<delete::Response, delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.SignalRService/SignalR/{}",
operation_config.base_path(),
subscription_id,
resource_group_name,
resource_name
);
let mut url = url::Url::parse(url_str).map_err(|source| delete::Error::ParseUrlError { source })?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(|source| delete::Error::GetTokenError { source })?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(|source| delete::Error::BuildRequestError { source })?;
let rsp = http_client
.execute_request(req)
.await
.map_err(|source| delete::Error::ExecuteRequestError { source })?;
match rsp.status() {
http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
status_code => {
let rsp_body = rsp.body();
Err(delete::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod delete {
use crate::{models, models::*};
#[derive(Debug)]
pub enum Response {
Accepted202,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {}", source)]
ParseUrlError { source: url::ParseError },
#[error("Failed to build request: {}", source)]
BuildRequestError { source: http::Error },
#[error("Failed to execute request: {}", source)]
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to serialize request body: {}", source)]
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to deserialize response body: {}", source)]
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
#[error("Failed to get access token: {}", source)]
GetTokenError { source: azure_core::errors::AzureError },
}
}
}
pub mod usages {
use crate::models::*;
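    // Lists the SignalR usage quotas for a subscription in one Azure location.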
pub async fn list(
operation_config: &crate::OperationConfig,
location: &str,
subscription_id: &str,
) -> std::result::Result<SignalRUsageList, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.SignalRService/locations/{}/usages",
operation_config.base_path(),
subscription_id,
location
);
let mut url = url::Url::parse(url_str).map_err(|source| list::Error::ParseUrlError { source })?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(|source| list::Error::GetTokenError { source })?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(|source| list::Error::BuildRequestError { source })?;
let rsp = http_client
.execute_request(req)
.await
.map_err(|source| list::Error::ExecuteRequestError { source })?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: SignalRUsageList = serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError {
source,
body: rsp_body.clone(),
})?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
Err(list::Error::UnexpectedResponse {
status_code,
body: rsp_body.clone(),
})
}
}
}
pub mod list {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {}", source)]
ParseUrlError { source: url::ParseError },
#[error("Failed to build request: {}", source)]
BuildRequestError { source: http::Error },
#[error("Failed to execute request: {}", source)]
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to serialize request body: {}", source)]
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to deserialize response body: {}", source)]
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
#[error("Failed to get access token: {}", source)]
GetTokenError { source: azure_core::errors::AzureError },
}
}
}
| get |
service.go | package post
import (
"context"
"github.com/pkg/errors"
"github.com/minipkg/log"
"github.com/minipkg/selection_condition"
"go.mongodb.org/mongo-driver/mongo"
"redditclone/internal/domain/comment"
"redditclone/internal/domain/vote"
"redditclone/internal/pkg/apperror"
)
const MaxListLimit = 1000
// IService encapsulates usecase logic for posts.
type IService interface {
NewEntity() *Post
NewVoteEntity(userId uint, postId string, val int) *vote.Vote
Get(ctx context.Context, id string) (*Post, error)
//First(ctx context.Context, user *Post) (*Post, error)
Query(ctx context.Context, query selection_condition.SelectionCondition) ([]Post, error)
List(ctx context.Context) ([]Post, error)
//Count(ctx context.Context) (uint, error)
Create(ctx context.Context, entity *Post) error
ViewsIncr(ctx context.Context, entity *Post) error
//Update(ctx context.Context, entity *Post) error
Delete(ctx context.Context, id string) error
Vote(ctx context.Context, entity *vote.Vote) error
Unvote(ctx context.Context, entity *vote.Vote) error
}
type service struct {
//Domain Domain
logger log.ILogger
repository Repository
commentRepository comment.Repository
voteReporitory vote.Repository
}
// NewService creates a new service.
func NewService(logger log.ILogger, repo Repository, commentRepo comment.Repository, voteRepo vote.Repository) IService {
s := &service{
logger: logger,
repository: repo,
commentRepository: commentRepo,
voteReporitory: voteRepo,
}
repo.SetDefaultConditions(s.defaultConditions())
return s
}
// Defaults returns defaults params
func (s *service) defaultConditions() selection_condition.SelectionCondition {
return selection_condition.SelectionCondition{}
}
func (s *service) NewEntity() *Post {
return &Post{}
}
func (s *service) NewVoteEntity(userId uint, postId string, val int) *vote.Vote {
return &vote.Vote{
UserID: userId,
PostID: postId,
Value: val,
}
}
// Get returns the entity with the specified ID.
func (s *service) Get(ctx context.Context, id string) (*Post, error) {
entity, err := s.repository.Get(ctx, id)
if err != nil {
return nil, err
}
return entity, nil
}
/*
// Count returns the number of items.
func (s *service) Count(ctx context.Context) (uint, error) {
return s.repository.Count(ctx)
}*/
// Query returns the items with the specified offset and limit.
func (s *service) Query(ctx context.Context, query selection_condition.SelectionCondition) ([]Post, error) {
items, err := s.repository.Query(ctx, query)
if err != nil {
return nil, errors.Wrapf(err, "Can not find a list of posts by query: %v", query)
}
return items, nil
}
// List returns the items list.
func (s *service) List(ctx context.Context) ([]Post, error) {
items, err := s.repository.Query(ctx, selection_condition.SelectionCondition{})
if err != nil {
return nil, errors.Wrapf(err, "Can not find a list of posts by ctx")
}
return items, nil
}
func (s *service) Create(ctx context.Context, entity *Post) error {
return s.repository.Create(ctx, entity)
}
func (s *service) ViewsIncr(ctx context.Context, entity *Post) error {
entity.Views++
return s.repository.Update(ctx, entity)
}
func (s *service) Delete(ctx context.Context, id string) error {
return s.repository.Delete(ctx, id)
}
func (s *service) Vote(ctx context.Context, entity *vote.Vote) (err error) {
item := &vote.Vote{
PostID: entity.PostID,
UserID: entity.UserID,
}
if item, err = s.voteReporitory.First(ctx, item); err != nil {
if err == apperror.ErrNotFound {
			if err = s.voteReporitory.Create(ctx, entity); err != nil {
				return err
			}
			return s.PostChangeScore(ctx, entity.PostID, entity.Value)
}
return errors.Wrapf(err, "Can not find a vote by params: %v", item)
}
if item.Value == entity.Value {
// no action
return nil
}
item.Value = entity.Value
if err = s.voteReporitory.Update(ctx, item); err != nil {
return err
}
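	// Flipping an existing vote changes the score by twice the new value:
	// the old opposite vote is removed and the new one applied.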
return s.PostChangeScore(ctx, entity.PostID, 2*entity.Value)
}
func (s *service) Unvote(ctx context.Context, entity *vote.Vote) (err error) {
item := &vote.Vote{
PostID: entity.PostID,
UserID: entity.UserID,
}
if item, err = s.voteReporitory.First(ctx, item); err != nil {
if err == apperror.ErrNotFound {
return err
}
return errors.Wrapf(err, "Can not find a vote by params: %v", item)
}
if err = s.voteReporitory.Delete(ctx, item.ID); err != nil |
return s.PostChangeScore(ctx, entity.PostID, -1*entity.Value)
}
func (s *service) PostChangeScore(ctx context.Context, id string, diff int) error {
entity, err := s.repository.Get(ctx, id)
if err != nil {
if err == mongo.ErrNoDocuments {
return errors.Wrapf(apperror.ErrNotFound, "Post id: %q not found", id)
}
return errors.Wrapf(apperror.ErrInternal, "Post id: %q not found", id)
}
entity.Score += diff
err = s.repository.Update(ctx, entity)
if err != nil {
return errors.Wrapf(apperror.ErrInternal, "Can not update post: %v, error: %v", entity, err)
}
return nil
}
| {
return err
} |
podcast_test.go | package podcast
import (
"log"
"os"
"testing"
"github.com/jackc/pgx/v4/pgxpool"
"github.com/sschwartz96/syncapod-backend/internal"
)
var (
dbpg *pgxpool.Pool
)
func TestMain(m *testing.M) {
// spin up docker container and return pgx pool
var dockerCleanFunc func() error
var err error
dbpg, dockerCleanFunc, err = internal.StartDockerDB("db_auth")
if err != nil {
log.Fatalf("auth.TestMain() error setting up docker db: %v", err)
}
// setup db
setupPodcastDB()
// run tests
runCode := m.Run()
// close pgx pool
dbpg.Close()
// cleanup docker container
err = dockerCleanFunc()
if err != nil {
log.Fatalf("podcast.TestMain() error cleaning up docker container: %v", err)
}
os.Exit(runCode)
}
func | () {
}
| setupPodcastDB |
sanctum.interceptor.ts | import { Inject, Injectable, InjectionToken, Optional } from '@angular/core';
import { HttpClient, HttpEvent, HttpHandler, HttpInterceptor, HttpRequest } from '@angular/common/http';
import { Observable } from 'rxjs';
import { BASE_URL } from '@core/interceptors/base-url.interceptor';
import { switchMap } from 'rxjs/operators';
export const SANCTUM_PREFIX = new InjectionToken<string>('SANCTUM_PREFIX');
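/**
 * Fetches Laravel Sanctum's CSRF cookie once, before the first intercepted
 * request goes out, then lets all subsequent requests pass straight through.
 */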
@Injectable()
export class SanctumInterceptor implements HttpInterceptor {
private ready = false;
constructor(
private http: HttpClient,
@Optional() @Inject(BASE_URL) private baseUrl?: string,
@Optional() @Inject(SANCTUM_PREFIX) private prefix?: string,
) {}
intercept(request: HttpRequest<unknown>, next: HttpHandler): Observable<HttpEvent<unknown>> {
if (this.ready === false) {
this.ready = true;
return this.http.get(this.getUrl()).pipe(
switchMap(() => next.handle(request)),
);
}
return next.handle(request);
}
private getUrl() {
const path = `/${(this.prefix || 'sanctum')}/csrf-cookie`;
| }
const url = new URL(this.baseUrl);
return url.origin + path;
}
} | if (!this.baseUrl) {
return path; |
maps.js | import { border, brand } from "../../variables";
/*
DISCLAIMER:
Do not change this file because it is core styling.
Customizing core files will make updating Atlas much more difficult in the future.
To customize any core styling, copy the part you want to customize to styles/native/app/ so the core styling is overwritten.
==========================================================================
Maps
//== Design Properties
//## Helper classes to change the look and feel of the widget
========================================================================== */
// Maps Colors
export const mapsSuccess = {
marker: {
color: brand.success
}
};
export const mapsWarning = {
marker: { | };
export const mapsDanger = {
marker: {
color: brand.danger
}
};
//
// Maps Size
export const mapsSquare = {
container: {
aspectRatio: 1 / 1
}
};
export const mapsMaxSpace = {
container: {
flex: 1,
aspectRatio: undefined
}
};
//
// == Extra Classes
// ## Helper classes to change the look and feel of the widget
// -------------------------------------------------------------------------------------------------------------------//
// Maps Shape
export const mapsRounded = {
container: {
borderRadius: border.radiusSmall,
overflow: "hidden"
}
}; | color: brand.warning
} |
patcher.py | """Patching utilities for working with fake objects.
See :ref:`using-fudge` for common scenarios.
"""
__all__ = ['patch_object', 'with_patched_object', 'PatchHandler',
'patched_context', 'patch']
import sys
import fudge
from fudge.util import wraps
class patch(object):
"""A test decorator that patches importable names with :class:`fakes <Fake>`
Each fake is exposed as an argument to the test:
.. doctest::
:hide:
>>> import fudge
.. doctest::
>>> @fudge.patch('os.remove')
... def test(fake_remove):
... fake_remove.expects_call()
... # do stuff...
...
>>> test()
Traceback (most recent call last):
...
AssertionError: fake:os.remove() was not called
.. doctest::
:hide:
>>> fudge.clear_expectations()
Many paths can be patched at once:
.. doctest::
>>> @fudge.patch('os.remove',
... 'shutil.rmtree')
... def test(fake_remove, fake_rmtree):
... fake_remove.is_callable()
... # do stuff...
...
>>> test()
For convenience, the patch method calls
:func:`fudge.clear_calls`, :func:`fudge.verify`, and :func:`fudge.clear_expectations`. For that reason, you must manage all your fake objects within the test function itself.
.. note::
If you are using a unittest class, you cannot declare fakes
within ``setUp()`` unless you manually clear calls and clear
expectations. If you do that, you'll want to use the
:func:`fudge.with_fakes` decorator instead of ``@patch``.
"""
def __init__(self, *obj_paths):
|
def __call__(self, fn):
@wraps(fn)
def caller(*args, **kw):
fakes = self.__enter__()
if not isinstance(fakes, (tuple, list)):
fakes = [fakes]
args += tuple(fakes)
value = None
try:
value = fn(*args, **kw)
except:
etype, val, tb = sys.exc_info()
self.__exit__(etype, val, tb)
raise etype, val, tb
else:
self.__exit__(None, None, None)
return value
return caller
def __enter__(self):
fudge.clear_expectations()
fudge.clear_calls()
self.patches = []
all_fakes = []
for path in self.obj_paths:
try:
target, attr = path.rsplit('.', 1)
except (TypeError, ValueError):
raise TypeError(
"Need a valid target to patch. You supplied: %r"
% path)
fake = fudge.Fake(path)
all_fakes.append(fake)
self.patches.append(patch_object(target, attr, fake))
if len(all_fakes) == 1:
return all_fakes[0]
else:
return all_fakes
def __exit__(self, exc_type, exc_val, exc_tb):
try:
if not exc_type:
fudge.verify()
finally:
for p in self.patches:
p.restore()
fudge.clear_expectations()
def with_patched_object(obj, attr_name, patched_value):
"""Decorator that patches an object before the decorated method
is called and restores it afterwards.
This is a wrapper around :func:`fudge.patcher.patch_object`
Example::
>>> from fudge import with_patched_object
>>> class Session:
... state = 'clean'
...
>>> @with_patched_object(Session, "state", "dirty")
... def test():
... print Session.state
...
>>> test()
dirty
>>> print Session.state
clean
"""
def patcher(method):
@wraps(method)
def method_call(*m_args, **m_kw):
patched_obj = patch_object(obj, attr_name, patched_value)
try:
return method(*m_args, **m_kw)
finally:
patched_obj.restore()
return method_call
return patcher
class patched_context(object):
"""A context manager to patch an object temporarily during a `with statement`_ block.
This is a wrapper around :func:`fudge.patcher.patch_object`
.. lame, lame, cannot figure out how to apply __future__ to doctest
so this output is currently skipped
.. doctest:: python25
:options: +SKIP
>>> from fudge import patched_context
>>> class Session:
... state = 'clean'
...
>>> with patched_context(Session, "state", "dirty"): # doctest: +SKIP
... print Session.state
...
dirty
>>> print Session.state
clean
.. _with statement: http://www.python.org/dev/peps/pep-0343/
"""
def __init__(self, obj, attr_name, patched_value):
# note that a @contextmanager decorator would be simpler
# but it can't be used since a value cannot be yielded within a
# try/finally block which is needed to restore the object on finally.
self.patched_object = patch_object(obj, attr_name, patched_value)
def __enter__(self):
return self.patched_object
def __exit__(self, exc_type, exc_val, exc_tb):
self.patched_object.restore()
def patch_object(obj, attr_name, patched_value):
"""Patches an object and returns an instance of :class:`fudge.patcher.PatchHandler` for later restoration.
Note that if *obj* is not an object but a path to a module then it will be imported.
You may want to use a more convenient wrapper :func:`with_patched_object` or :func:`patched_context`
Example::
>>> from fudge import patch_object
>>> class Session:
... state = 'clean'
...
>>> patched_session = patch_object(Session, "state", "dirty")
>>> Session.state
'dirty'
>>> patched_session.restore()
>>> Session.state
'clean'
Here is another example showing how to patch multiple objects at once::
>>> class Session:
... state = 'clean'
...
>>> class config:
... session_strategy = 'database'
...
>>> patches = [
... patch_object(config, "session_strategy", "filesystem"),
... patch_object(Session, "state", "dirty")
... ]
>>> try:
... # your app under test would run here ...
... print "(while patched)"
... print "config.session_strategy=%r" % config.session_strategy
... print "Session.state=%r" % Session.state
... finally:
... for p in patches:
... p.restore()
... print "(patches restored)"
(while patched)
config.session_strategy='filesystem'
Session.state='dirty'
(patches restored)
>>> config.session_strategy
'database'
>>> Session.state
'clean'
"""
if isinstance(obj, (str, unicode)):
obj_path = adjusted_path = obj
done = False
exc = None
at_top_level = False
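        # Walk the dotted path upwards until an importable module is found,
        # e.g. "smtplib.SMTP.connect" imports "smtplib" and getattrs the rest.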
while not done:
try:
obj = __import__(adjusted_path)
done = True
except ImportError:
                # Handle paths that traverse object attributes.
# Such as: smtplib.SMTP.connect
# smtplib <- module to import
adjusted_path = adjusted_path.rsplit('.', 1)[0]
if not exc:
exc = sys.exc_info()
if at_top_level:
# We're at the top level module and it doesn't exist.
# Raise the first exception since it will make more sense:
etype, val, tb = exc
raise etype, val, tb
if not adjusted_path.count('.'):
at_top_level = True
for part in obj_path.split('.')[1:]:
obj = getattr(obj, part)
handle = PatchHandler(obj, attr_name)
handle.patch(patched_value)
return handle
class NonExistant(object):
"""Represents a non-existant value."""
class PatchHandler(object):
"""Low level patch handler that memorizes a patch so you can restore it later.
You can use more convenient wrappers :func:`with_patched_object` and :func:`patched_context`
"""
def __init__(self, orig_object, attr_name):
self.orig_object = orig_object
self.attr_name = attr_name
self.proxy_object = None
self.orig_value, self.is_local = self._get_original(self.orig_object,
self.attr_name)
self.getter_class, self.getter = self._handle_getter(self.orig_object,
self.attr_name)
def patch(self, patched_value):
"""Set a new value for the attribute of the object."""
try:
if self.getter:
setattr(self.getter_class, self.attr_name, patched_value)
else:
setattr(self.orig_object, self.attr_name, patched_value)
except TypeError:
# Workaround for patching builtin objects:
proxy_name = 'fudge_proxy_%s_%s_%s' % (
self.orig_object.__module__,
self.orig_object.__name__,
patched_value.__class__.__name__
)
self.proxy_object = type(proxy_name, (self.orig_object,),
{self.attr_name: patched_value})
mod = sys.modules[self.orig_object.__module__]
setattr(mod, self.orig_object.__name__, self.proxy_object)
def restore(self):
"""Restore the saved value for the attribute of the object."""
if self.proxy_object is None:
if self.getter:
setattr(self.getter_class, self.attr_name, self.getter)
elif self.is_local:
setattr(self.orig_object, self.attr_name, self.orig_value)
else:
# Was not a local, safe to delete:
delattr(self.orig_object, self.attr_name)
else:
setattr(sys.modules[self.orig_object.__module__],
self.orig_object.__name__,
self.orig_object)
def _find_class_for_attr(self, cls, attr):
if attr in cls.__dict__:
return cls
else:
for base in cls.__bases__:
if self._find_class_for_attr(base, attr) is not NonExistant:
return base
return NonExistant
def _get_original(self, orig_object, name):
try:
value = orig_object.__dict__[name]
is_local = True
except (AttributeError, KeyError):
value = getattr(orig_object, name, NonExistant)
is_local = False
if value is NonExistant:
raise AttributeError(
"%s does not have the attribute %r" % (orig_object, name))
return value, is_local
def _get_exact_original(self, orig_object, name):
if hasattr(orig_object, '__dict__'):
if name not in orig_object.__dict__:
# TODO: handle class objects, not just instance objects?
# This is only here for Class.property.__get__
if hasattr(orig_object, '__class__'):
cls = orig_object.__class__
orig_object = self._find_class_for_attr(cls, name)
return orig_object
def _handle_getter(self, orig_object, name):
getter_class, getter = None, None
exact_orig = self._get_exact_original(orig_object, name)
try:
ob = exact_orig.__dict__[name]
except (AttributeError, KeyError):
pass
else:
if hasattr(ob, '__get__'):
getter_class = exact_orig
getter = ob
return getter_class, getter
| self.obj_paths = obj_paths |
index.ts | import { Injectable } from '@angular/core';
import { Cordova, IonicNativePlugin, Plugin } from '@ionic-native/core';
/**
* Options for the video playback using the `play` function.
*/
export interface VideoOptions {
/**
* Set the initial volume of the video playback, where 0.0 is 0% volume and 1.0 is 100%.
* For example: for a volume of 30% set the value to 0.3.
*/
volume?: number;
/**
   * Scaling mode for the playback. Only two values are supported:
   * SCALE_TO_FIT (the default) and SCALE_TO_FIT_WITH_CROPPING.
*/
scalingMode?: number;
/**
   * Allows you to set whether the fullscreen dialog is cancelable, e.g. to force the complete video to play.
*/
cancelableDialog?: boolean;
}
/**
* @name Video Player
* @description
* A Cordova plugin that simply allows you to immediately play a video in fullscreen mode.
*
* Requires Cordova plugin: `com.mwj.cordova.androidvideoplayer`. For more info, please see the [AndroidVideoPlayer plugin docs](https://github.com/waqasjamil/cordova-plugin-videoplayer).
*
* @usage
* ```typescript
* import { AndroidVideoPlayer } from '@ionic-native/android-video-player/ngx';
*
* constructor(private videoPlayer: AndroidVideoPlayer) { }
*
* ...
*
* // Playing a video.
* this.videoPlayer.play('file:///android_asset/www/movie.mp4').then(() => {
* console.log('video completed');
* }).catch(err => { | * });
*
* ```
* @interfaces
* VideoOptions
*/
@Plugin({
pluginName: 'AndroidVideoPlayer',
plugin: 'cordova-plugin-android-video-player',
pluginRef: 'AndroidVideoPlayer',
repo: 'https://github.com/waqasjamil/cordova-plugin-videoplayer',
platforms: ['Android'],
})
@Injectable()
export class AndroidVideoPlayer extends IonicNativePlugin {
/**
* Plays the video from the passed url.
* @param fileUrl {string} File url to the video.
* @param options {VideoOptions?} Optional video playback settings. See options above.
* @returns {Promise<any>} Resolves promise when the video was played successfully.
*/
@Cordova()
play(fileUrl: string, options?: VideoOptions): Promise<any> {
return;
}
/**
* Stops the video playback immediatly.
*/
@Cordova({ sync: true })
close(): void {}
} | * console.log(err); |
table_datadog_role.go | package datadog
import (
"context"
"strings"
datadog "github.com/DataDog/datadog-api-client-go/api/v2/datadog"
"github.com/turbot/steampipe-plugin-sdk/v3/grpc/proto"
"github.com/turbot/steampipe-plugin-sdk/v3/plugin"
"github.com/turbot/steampipe-plugin-sdk/v3/plugin/transform"
)
func tableDatadogRole(ctx context.Context) *plugin.Table {
return &plugin.Table{
Name: "datadog_role",
Description: "Roles categorize users and define what account permissions those users have.",
Get: &plugin.GetConfig{
Hydrate: getRole,
KeyColumns: plugin.SingleColumn("id"),
},
List: &plugin.ListConfig{
Hydrate: listRoles,
KeyColumns: plugin.KeyColumnSlice{
{Name: "name", Require: plugin.Optional},
},
},
Columns: []*plugin.Column{
// Top columns | {Name: "user_count", Type: proto.ColumnType_INT, Transform: transform.FromField("Attributes.UserCount"), Description: "Number of users associated with the role."},
{Name: "created_at", Type: proto.ColumnType_TIMESTAMP, Transform: transform.FromField("Attributes.CreatedAt"), Description: "Creation time of the role."},
{Name: "modified_at", Type: proto.ColumnType_TIMESTAMP, Transform: transform.FromField("Attributes.ModifiedAt"), Description: "Time of last role modification."},
			// JSON columns
			{Name: "users", Type: proto.ColumnType_JSON, Hydrate: listRoleUsers, Transform: transform.From(userList), Description: "List of user emails attached to the role."},
			{Name: "permissions", Type: proto.ColumnType_JSON, Transform: transform.FromField("Relationships.Permissions.Data"), Description: "Set of objects containing the permission ID and the name of the permissions granted to this role."},
},
}
}
func listRoles(ctx context.Context, d *plugin.QueryData, _ *plugin.HydrateData) (interface{}, error) {
ctx, apiClient, _, err := connectV2(ctx, d)
if err != nil {
plugin.Logger(ctx).Error("datadog_role.listRoles", "connection_error", err)
return nil, err
}
// https://github.com/DataDog/datadog-api-client-go/blob/master/api/v2/datadog/docs/RolesApi.md#listroles
opts := datadog.ListRolesOptionalParameters{
PageSize: datadog.PtrInt64(int64(100)),
PageNumber: datadog.PtrInt64(int64(0)),
}
name := d.KeyColumnQualString("name")
if name != "" {
opts.WithFilter(name)
}
count := int64(0)
for {
resp, _, err := apiClient.RolesApi.ListRoles(ctx, opts)
if err != nil {
plugin.Logger(ctx).Error("datadog_role.listRoles", "query_error", err)
return nil, err
}
for _, role := range resp.GetData() {
count++
d.StreamListItem(ctx, role)
// Check if context has been cancelled or if the limit has been hit (if specified)
if d.QueryStatus.RowsRemaining(ctx) == 0 {
return nil, nil
}
}
// Break loop if using filter
if resp.Meta.Page.HasTotalFilteredCount() {
if count >= resp.Meta.Page.GetTotalFilteredCount() {
return nil, nil
}
}
// Break loop if not using filter
if count >= resp.Meta.Page.GetTotalCount() {
return nil, nil
}
opts.WithPageNumber(*opts.PageNumber + 1)
}
}
func getRole(ctx context.Context, d *plugin.QueryData, h *plugin.HydrateData) (interface{}, error) {
var roleID string
if h.Item != nil {
roleID = *h.Item.(datadog.Role).Id
} else {
roleID = d.KeyColumnQualString("id")
}
if strings.TrimSpace(roleID) == "" {
return nil, nil
}
ctx, apiClient, _, err := connectV2(ctx, d)
if err != nil {
plugin.Logger(ctx).Error("datadog_role.getRole", "connection_error", err)
return nil, err
}
// https://github.com/DataDog/datadog-api-client-go/blob/master/api/v2/datadog/docs/RolesApi.md#GetRole
resp, _, err := apiClient.RolesApi.GetRole(ctx, roleID)
if err != nil {
plugin.Logger(ctx).Error("datadog_role.getRole", "query_error", err)
if err.Error() == "404 Not Found" {
return nil, nil
}
return nil, err
}
return resp.GetData(), nil
}
func listRoleUsers(ctx context.Context, d *plugin.QueryData, h *plugin.HydrateData) (interface{}, error) {
role := h.Item.(datadog.Role)
ctx, apiClient, _, err := connectV2(ctx, d)
if err != nil {
plugin.Logger(ctx).Error("datadog_role.listRoleUsers", "connection_error", err)
return nil, err
}
	// https://github.com/DataDog/datadog-api-client-go/blob/master/api/v2/datadog/docs/RolesApi.md#listroleusers
opts := datadog.ListRoleUsersOptionalParameters{
PageSize: datadog.PtrInt64(int64(100)),
PageNumber: datadog.PtrInt64(int64(0)),
}
paging := true
count := int64(0)
var users []datadog.User
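	// Page through the role's users until the reported total count is reached.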
for paging {
resp, _, err := apiClient.RolesApi.ListRoleUsers(ctx, *role.Id, opts)
if err != nil {
plugin.Logger(ctx).Error("datadog_role.listRoleUsers", "query_error", err)
}
noOfUsers := len(resp.GetData())
users = append(users, resp.GetData()...)
count += int64(noOfUsers)
if count >= resp.Meta.Page.GetTotalCount() {
paging = false
}
opts.PageNumber = datadog.PtrInt64(*opts.PageNumber + 1)
}
return users, nil
}
//// TRANSFORM FUNCTION
func userList(ctx context.Context, d *transform.TransformData) (interface{}, error) {
users := d.HydrateItem.([]datadog.User)
	var userEmails []string
	for _, user := range users {
		userEmails = append(userEmails, *user.Attributes.Email)
	}
	return userEmails, nil
} | {Name: "name", Type: proto.ColumnType_STRING, Transform: transform.FromField("Attributes.Name"), Description: "Name of the role."},
{Name: "id", Type: proto.ColumnType_STRING, Description: "Id of the role."}, |
lib.rs | use proc_macro::TokenStream;
use quote::quote;
use syn::{parse::Parser, parse_macro_input, DeriveInput};
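// Attribute macro that appends a `view: ViewBase` field to the annotated
// struct's named fields and re-emits the item otherwise unchanged.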
#[proc_macro_attribute]
pub fn | (_args: TokenStream, stream: TokenStream) -> TokenStream {
let mut stream = parse_macro_input!(stream as DeriveInput);
let struct_data = match &mut stream.data {
syn::Data::Struct(data) => data,
_ => panic!("`view` macro has to be used with structs"),
};
if let syn::Fields::Named(fields) = &mut struct_data.fields {
fields
.named
.push(syn::Field::parse_named.parse2(quote! { view: ViewBase }).unwrap());
}
quote! { #stream }.into()
}
| view |
GameRoom.tsx | import React, { memo, useContext, useState } from "react";
import { Container, withApp } from "react-pixi-fiber";
import { MainWrapper } from "../environment/MainWrapper";
import Road from "../game/Road";
import { ResourcesContext } from "../../constants/ResourcesContext";
import MainCar from "../game/MainCar";
import { ControlElements } from "../game/ControlElement";
import { TapMoverMainCar } from "../game/TapMoverMainCar";
import { ScoreTable } from "../game/ScoreTable";
import { ElapsedTimer } from "../game/ElapsedTimer";
import { SpriteButton } from "../basic/SpriteButton";
import Clickable from "../basic/Clickable";
import { CarAndCoinsLayer } from "../game/CarAndCoinsLayer";
import {
setAppScreenState,
useAppScreenState,
} from "../../states/appScreenState";
import { MenuLayout } from "../game/Menu";
import { useMedia } from "react-use";
const doNothing = () => {};
export const isTablet = window.innerWidth < 1024;
export enum trackCoordinates {
firstTrack = 45,
secondTrack = 215,
thirdTrack = 385,
fourthTrack = 555,
}
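// Flips the pause flag first, then stops the PIXI ticker on a short delay so
// the pause menu gets a frame to render before the game loop halts.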
const PauseButton = withApp(
({
texture,
width,
app,
}: {
texture: PIXI.Texture;
width: number;
app: any;
}) => {
return (
<Clickable
onClick={() => {
setAppScreenState({ pause: true });
setTimeout(() => app.ticker.stop(), 100);
}}
>
<SpriteButton
texture={texture}
x={isTablet ? width - 110 : width + 50}
y={33}
/>
</Clickable>
);
}
);
export const GameRoom = memo(() => {
const { resources } = useContext(ResourcesContext);
const [mainCarXCoordinate, setMainCarXCoordinate] = useState<number>(385);
const isGameEnd = useAppScreenState<boolean>("endGame");
const isPause = useAppScreenState<boolean>("pause");
const timeout = useAppScreenState("timeout");
const coinCount = useAppScreenState<number>("coinCount");
const isDisableControls = useAppScreenState("disableControls");
const isPhone = useMedia("(max-width: 480px)");
if (isGameEnd || timeout) {
alert(`Game over. You got ${coinCount} coins.`);
}
if (coinCount === 5) {
alert("You win");
window.location.reload();
}
return (
<Container name="GameRoom">
<MainWrapper
render={(width, height) => (
<TapMoverMainCar
mainCarXCoordinate={mainCarXCoordinate}
setMainCarXCoordinate={
isDisableControls ? doNothing : setMainCarXCoordinate
}
>
<Road | y={0}
width={isPhone ? width * 2 : width}
height={isPhone ? 2000 : height}
texture={resources.road.texture}
/>
<CarAndCoinsLayer />
<MainCar
texture1={resources.car.texture}
texture2={resources.carOpacity25.texture}
texture3={resources.carOpacity5.texture}
y={isPhone ? height * 2.1 : height / 1.5}
x={mainCarXCoordinate}
xSetter={setMainCarXCoordinate}
/>
<ControlElements
resources={resources}
width={isPhone ? width * 1.6 : width}
height={isPhone ? height * 2.6 : height}
xSetter={isDisableControls ? doNothing : setMainCarXCoordinate}
/>
<ScoreTable texture={resources.coin.texture} />
<ElapsedTimer width={isPhone ? width * 1.6 : width} />
<PauseButton
width={isPhone ? width * 1.6 : width}
texture={
isTablet
? resources.pauseButton.texture
: resources.pauseButtonLarge.texture
}
/>
</TapMoverMainCar>
)}
/>
<MenuLayout
isGameEnd={isGameEnd}
isPause={isPause}
width={isPhone ? window.innerWidth * 1.6 : window.innerWidth}
height={isPhone ? window.innerHeight * 1.6 : window.innerHeight}
onExitClickCallBack={() => alert(coinCount)}
/>
</Container>
);
}); | x={0} |
actions.ts | import { Action } from '@ngrx/store';
import { KeyValue, NgrxFormControlId, ValidationErrors } from './state';
// NOTE: the explicit type declaration for the `TYPE` properties is required
// for the output declarations to properly use the literal string type instead
// of just `string`
export class SetValueAction<TValue> implements Action {
static readonly TYPE: 'ngrx/forms/SET_VALUE' = 'ngrx/forms/SET_VALUE';
readonly type = SetValueAction.TYPE;
constructor(
public readonly controlId: NgrxFormControlId,
public readonly value: TValue,
) { }
}
export class SetErrorsAction implements Action {
static readonly TYPE: 'ngrx/forms/SET_ERRORS' = 'ngrx/forms/SET_ERRORS';
readonly type = SetErrorsAction.TYPE;
constructor(
public readonly controlId: NgrxFormControlId,
public readonly errors: ValidationErrors,
) { }
}
export class SetAsyncErrorAction implements Action {
static readonly TYPE: 'ngrx/forms/SET_ASYNC_ERROR' = 'ngrx/forms/SET_ASYNC_ERROR';
readonly type = SetAsyncErrorAction.TYPE;
constructor(
public readonly controlId: NgrxFormControlId,
public readonly name: string,
public readonly value: any,
) { }
}
export class ClearAsyncErrorAction implements Action {
static readonly TYPE: 'ngrx/forms/CLEAR_ASYNC_ERROR' = 'ngrx/forms/CLEAR_ASYNC_ERROR';
readonly type = ClearAsyncErrorAction.TYPE;
constructor(
public readonly controlId: NgrxFormControlId,
public readonly name: string,
) { }
}
export class StartAsyncValidationAction implements Action {
static readonly TYPE: 'ngrx/forms/START_ASYNC_VALIDATION' = 'ngrx/forms/START_ASYNC_VALIDATION';
readonly type = StartAsyncValidationAction.TYPE;
constructor(
public readonly controlId: NgrxFormControlId,
public readonly name: string,
) { }
}
export class MarkAsDirtyAction implements Action {
static readonly TYPE: 'ngrx/forms/MARK_AS_DIRTY' = 'ngrx/forms/MARK_AS_DIRTY';
readonly type = MarkAsDirtyAction.TYPE;
constructor(
public readonly controlId: NgrxFormControlId,
) { }
}
export class MarkAsPristineAction implements Action {
static readonly TYPE: 'ngrx/forms/MARK_AS_PRISTINE' = 'ngrx/forms/MARK_AS_PRISTINE';
readonly type = MarkAsPristineAction.TYPE;
constructor(
public readonly controlId: NgrxFormControlId,
) { }
}
export class EnableAction implements Action {
static readonly TYPE: 'ngrx/forms/ENABLE' = 'ngrx/forms/ENABLE';
readonly type = EnableAction.TYPE;
constructor(
public readonly controlId: NgrxFormControlId,
) { }
}
export class DisableAction implements Action {
static readonly TYPE: 'ngrx/forms/DISABLE' = 'ngrx/forms/DISABLE';
readonly type = DisableAction.TYPE;
constructor(
public readonly controlId: NgrxFormControlId,
) { }
}
export class MarkAsTouchedAction implements Action {
static readonly TYPE: 'ngrx/forms/MARK_AS_TOUCHED' = 'ngrx/forms/MARK_AS_TOUCHED';
readonly type = MarkAsTouchedAction.TYPE;
|
export class MarkAsUntouchedAction implements Action {
static readonly TYPE: 'ngrx/forms/MARK_AS_UNTOUCHED' = 'ngrx/forms/MARK_AS_UNTOUCHED';
readonly type = MarkAsUntouchedAction.TYPE;
constructor(
public readonly controlId: NgrxFormControlId,
) { }
}
export class FocusAction implements Action {
static readonly TYPE: 'ngrx/forms/FOCUS' = 'ngrx/forms/FOCUS';
readonly type = FocusAction.TYPE;
constructor(
public readonly controlId: NgrxFormControlId,
) { }
}
export class UnfocusAction implements Action {
static readonly TYPE: 'ngrx/forms/UNFOCUS' = 'ngrx/forms/UNFOCUS';
readonly type = UnfocusAction.TYPE;
constructor(
public readonly controlId: NgrxFormControlId,
) { }
}
export class MarkAsSubmittedAction implements Action {
static readonly TYPE: 'ngrx/forms/MARK_AS_SUBMITTED' = 'ngrx/forms/MARK_AS_SUBMITTED';
readonly type = MarkAsSubmittedAction.TYPE;
constructor(
public readonly controlId: NgrxFormControlId,
) { }
}
export class MarkAsUnsubmittedAction implements Action {
static readonly TYPE: 'ngrx/forms/MARK_AS_UNSUBMITTED' = 'ngrx/forms/MARK_AS_UNSUBMITTED';
readonly type = MarkAsUnsubmittedAction.TYPE;
constructor(
public readonly controlId: NgrxFormControlId,
) { }
}
export class AddArrayControlAction<TValue> implements Action {
static readonly TYPE: 'ngrx/forms/ADD_ARRAY_CONTROL' = 'ngrx/forms/ADD_ARRAY_CONTROL';
readonly type = AddArrayControlAction.TYPE;
constructor(
public readonly controlId: NgrxFormControlId,
public readonly value: TValue,
public readonly index?: number,
) { }
}
export class AddGroupControlAction<TValue extends KeyValue, TControlKey extends keyof TValue = keyof TValue> implements Action {
static readonly TYPE: 'ngrx/forms/ADD_GROUP_CONTROL' = 'ngrx/forms/ADD_GROUP_CONTROL';
readonly type = AddGroupControlAction.TYPE;
constructor(
public readonly controlId: NgrxFormControlId,
public readonly name: keyof TValue,
public readonly value: TValue[TControlKey],
) { }
}
export class RemoveArrayControlAction implements Action {
static readonly TYPE: 'ngrx/forms/REMOVE_ARRAY_CONTROL' = 'ngrx/forms/REMOVE_ARRAY_CONTROL';
readonly type = RemoveArrayControlAction.TYPE;
constructor(
public readonly controlId: NgrxFormControlId,
public readonly index: number,
) { }
}
export class RemoveGroupControlAction<TValue> implements Action {
static readonly TYPE: 'ngrx/forms/REMOVE_CONTROL' = 'ngrx/forms/REMOVE_CONTROL';
readonly type = RemoveGroupControlAction.TYPE;
constructor(
public readonly controlId: NgrxFormControlId,
public readonly name: keyof TValue,
) { }
}
export class SetUserDefinedPropertyAction implements Action {
static readonly TYPE: 'ngrx/forms/SET_USER_DEFINED_PROPERTY' = 'ngrx/forms/SET_USER_DEFINED_PROPERTY';
readonly type = SetUserDefinedPropertyAction.TYPE;
constructor(
public readonly controlId: NgrxFormControlId,
public readonly name: string,
public readonly value: any,
) { }
}
export class ResetAction implements Action {
static readonly TYPE: 'ngrx/forms/RESET' = 'ngrx/forms/RESET';
readonly type = ResetAction.TYPE;
constructor(
public readonly controlId: NgrxFormControlId,
) { }
}
export type Actions<TValue> =
| SetValueAction<TValue>
| SetErrorsAction
| SetAsyncErrorAction
| ClearAsyncErrorAction
| StartAsyncValidationAction
| MarkAsDirtyAction
| MarkAsPristineAction
| EnableAction
| DisableAction
| MarkAsTouchedAction
| MarkAsUntouchedAction
| FocusAction
| UnfocusAction
| MarkAsSubmittedAction
| MarkAsUnsubmittedAction
| AddGroupControlAction<TValue>
| RemoveGroupControlAction<TValue>
| AddArrayControlAction<any>
| RemoveArrayControlAction
| SetUserDefinedPropertyAction
| ResetAction
;
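/** Checks the action's type prefix to see whether ngrx-forms dispatched it. */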
export function isNgrxFormsAction(action: Action) {
return !!action.type && action.type.startsWith('ngrx/forms/');
} | constructor(
public readonly controlId: NgrxFormControlId,
) { }
} |
detector_mmdet.py | # model settings
model = dict(
type='CascadeRCNN',
pretrained='pytorch_resnext101.pth',
backbone=dict(
type='ResNeXt',
depth=101,
groups=32,
base_width=4,
num_stages=4,
out_indices=(0, 1, 2, 3),
frozen_stages=1,
norm_cfg=dict(type='BN', requires_grad=True),
style='pytorch'),
neck=dict(
type='FPN',
in_channels=[256, 512, 1024, 2048],
out_channels=256,
num_outs=5),
rpn_head=dict(
type='RPNHead',
in_channels=256,
feat_channels=256,
anchor_generator=dict(
type='AnchorGenerator',
scales=[8],
ratios=[0.5, 1.0, 2.0],
strides=[4, 8, 16, 32, 64]),
bbox_coder=dict(
type='DeltaXYWHBBoxCoder',
target_means=[.0, .0, .0, .0],
target_stds=[1.0, 1.0, 1.0, 1.0]),
loss_cls=dict(
type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0),
loss_bbox=dict(type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.0)),
roi_head=dict(
type='CascadeRoIHead',
num_stages=3,
stage_loss_weights=[1, 0.5, 0.25],
bbox_roi_extractor=dict(
type='SingleRoIExtractor',
roi_layer=dict(type='RoIAlign', out_size=7, sample_num=0),
out_channels=256,
featmap_strides=[4, 8, 16, 32]),
bbox_head=[
dict(
type='Shared2FCBBoxHead',
in_channels=256,
fc_out_channels=1024,
roi_feat_size=7,
num_classes=[-CLASS_COUNT_INSERT-],
bbox_coder=dict(
type='DeltaXYWHBBoxCoder',
target_means=[0., 0., 0., 0.],
target_stds=[0.1, 0.1, 0.2, 0.2]),
reg_class_agnostic=True,
loss_cls=dict(
type='CrossEntropyLoss',
use_sigmoid=False,
loss_weight=1.0),
loss_bbox=dict(type='SmoothL1Loss', beta=1.0,
loss_weight=1.0)),
dict(
type='Shared2FCBBoxHead',
in_channels=256,
fc_out_channels=1024,
roi_feat_size=7,
num_classes=[-CLASS_COUNT_INSERT-],
bbox_coder=dict(
type='DeltaXYWHBBoxCoder',
target_means=[0., 0., 0., 0.],
target_stds=[0.05, 0.05, 0.1, 0.1]),
reg_class_agnostic=True,
loss_cls=dict(
type='CrossEntropyLoss',
use_sigmoid=False,
loss_weight=1.0),
loss_bbox=dict(type='SmoothL1Loss', beta=1.0,
loss_weight=1.0)),
dict(
type='Shared2FCBBoxHead',
in_channels=256,
fc_out_channels=1024,
roi_feat_size=7,
num_classes=[-CLASS_COUNT_INSERT-],
bbox_coder=dict(
type='DeltaXYWHBBoxCoder',
target_means=[0., 0., 0., 0.],
target_stds=[0.033, 0.033, 0.067, 0.067]),
reg_class_agnostic=True,
loss_cls=dict(
type='CrossEntropyLoss',
use_sigmoid=False,
loss_weight=1.0),
loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0))
]))
# model training and testing settings
train_cfg = dict(
rpn=dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.7,
neg_iou_thr=0.3,
min_pos_iou=0.3,
match_low_quality=True,
ignore_iof_thr=-1),
sampler=dict(
type='RandomSampler',
num=256,
pos_fraction=0.5,
neg_pos_ub=-1, | allowed_border=0,
pos_weight=-1,
debug=False),
rpn_proposal=dict(
nms_across_levels=False,
nms_pre=2000,
nms_post=2000,
max_num=2000,
nms_thr=0.7,
min_bbox_size=0),
rcnn=[
dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.5,
neg_iou_thr=0.5,
min_pos_iou=0.5,
match_low_quality=False,
ignore_iof_thr=-1),
sampler=dict(
type='RandomSampler',
num=512,
pos_fraction=0.25,
neg_pos_ub=-1,
add_gt_as_proposals=True),
pos_weight=-1,
debug=False),
dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.6,
neg_iou_thr=0.6,
min_pos_iou=0.6,
match_low_quality=False,
ignore_iof_thr=-1),
sampler=dict(
type='RandomSampler',
num=512,
pos_fraction=0.25,
neg_pos_ub=-1,
add_gt_as_proposals=True),
pos_weight=-1,
debug=False),
dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.7,
neg_iou_thr=0.7,
min_pos_iou=0.7,
match_low_quality=False,
ignore_iof_thr=-1),
sampler=dict(
type='RandomSampler',
num=512,
pos_fraction=0.25,
neg_pos_ub=-1,
add_gt_as_proposals=True),
pos_weight=-1,
debug=False)
])
test_cfg = dict(
rpn=dict(
nms_across_levels=False,
nms_pre=1000,
nms_post=1000,
max_num=1000,
nms_thr=0.7,
min_bbox_size=0),
rcnn=dict(
score_thr=0.05, nms=dict(type='nms', iou_thr=0.5), max_per_img=100))
dataset_type = 'DeepDataset'
data_root = 'augmented_images/'
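# Standard ImageNet channel statistics, matching the pretrained backbone.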
img_norm_cfg = dict(
mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True)
train_pipeline = [
dict(type='LoadImageFromFile'),
dict(type='LoadAnnotations', with_bbox=True),
dict(type='Resize', img_scale=[-IMAGE_SCALE_INSERT-], keep_ratio=True),
dict(type='RandomFlip', flip_ratio=0.5),
dict(type='Normalize', **img_norm_cfg),
dict(type='Pad', size_divisor=32),
dict(type='DefaultFormatBundle'),
dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']),
]
test_pipeline = [
dict(type='LoadImageFromFile'),
dict(
type='MultiScaleFlipAug',
img_scale=[-IMAGE_SCALE_INSERT-],
flip=False,
transforms=[
dict(type='Resize', keep_ratio=True),
dict(type='RandomFlip'),
dict(type='Normalize', **img_norm_cfg),
dict(type='Pad', size_divisor=32),
dict(type='ImageToTensor', keys=['img']),
dict(type='Collect', keys=['img']),
])
]
data = dict(
samples_per_gpu=[-IMAGES_PER_GPU_INSERT-],
workers_per_gpu=[-WORKERS_PER_GPU_INSERT-],
train=dict(
type=dataset_type,
ann_file=data_root + 'train_annotations.json',
img_prefix=data_root + 'train/',
pipeline=train_pipeline),
val=dict(
type=dataset_type,
ann_file=data_root + 'validation_annotations.json',
img_prefix=data_root + 'val/',
pipeline=test_pipeline),
test=dict(
type=dataset_type,
ann_file=data_root + 'test_annotations.json',
img_prefix=data_root + 'test/',
pipeline=test_pipeline))
evaluation = dict(interval=1, metric='bbox')
# optimizer
optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.0001)
optimizer_config = dict(grad_clip=None)
# learning policy
lr_config = dict(
policy='step',
warmup='linear',
warmup_iters=500,
warmup_ratio=0.001,
step=[9, 18])
total_epochs = 50
checkpoint_config = dict(interval=2)
# yapf:disable
log_config = dict(
interval=50,
hooks=[
dict(type='TextLoggerHook'),
# dict(type='TensorboardLoggerHook')
])
# yapf:enable
dist_params = dict(backend='nccl')
log_level = 'INFO'
load_from = None
resume_from = None
workflow = [('train', 1)] | add_gt_as_proposals=False), |
gbrarscrapy.py | #!/usr/bin/env python3
# encoding: utf-8
from lxml import html
import requests
import os
import random
import time
from fake_agent import fakeagent
class Gbrarscrapy(object):
def __init__(self, url_li, proxy_single):
self.title_xpa = '//a[@onmouseover]/text()'
self.score_list_xpa = '//span[@style="color:DarkSlateGray"]/text()'
self.id_xpa = '//a[contains(@href,"/torrent/")]/@href'
self.ch_xpa = '//tr[@class="lista2"][{}]/td[2]/span/text()'
# self.date_list_xpa = '//td[contains(@align,"center")
# and contains(@width,"150px")]/text()'
self.seli_xpa = '//td[@align="center" and @width="50px"]/font/text()'
        self.tor_dict = dict()  # torrent dict keyed by title: [seeds, address, score]
self.headers = {
'Accept': 'text/html,application/xhtml+xml,application/xml;q=' +
'0.9,image/webp,image/apng,*/*;q=0.8',
'Cookies': 'skt=v97mrzygux; gaDts48g=q8h5pp9t; skt=v97mrzygux; gaDts48g=q8h5pp9t; expla=1; tcc; aby=2; ppu_main_9ef78edf998c4df1e1636c9a474d9f47=1; ppu_sub_9ef78edf998c4df1e1636c9a474d9f47=1; ppu_delay_9ef78edf998c4df1e1636c9a474d9f47=1'
}
self.url = url_li
self.pro = proxy_single
self.user_agent = fakeagent.load_ua()
def run(self):
while 1:
try:
temp_agent = random.choice(self.user_agent)
agent = temp_agent.split("\n")[0]
self.headers["User-Agent"] = agent
pro = {"http": "http://" + random.choice(self.pro)}
s = requests.get(self.url, headers=self.headers,
proxies=pro, timeout=10)
response = html.fromstring(s.text)
print(s.text)
title_l = response.xpath(self.title_xpa) # title
id = (response.xpath(self.id_xpa)) # id
seed = response.xpath(self.seli_xpa) # seed
torrent_f = self.torent_front(id)
for i in range(25):
                    # address is the full torrent download URL
address = torrent_f[i] + title_l[i] + "-[rarbg.to].torrent"
check_sc = response.xpath(self.ch_xpa.format(i + 1))
                    # extract the movie title
title = title_l[i].split(".1080p.")[0]
                    # record the score; default to 0 when none is listed
                    if not check_sc or ('/' not in check_sc[0]):
                        score = 0
                    else:
                        score = float((check_sc[0].split(" ")[-1]).split('/')[0])
if score >= 5:
self.torrent_dict(title_l[i], seed[i],
title, address, score)
time.sleep(2)
print(len(self.tor_dict), self.tor_dict)
print(self.url)
self.torrent_save()
print("保存成功一页")
break
except Exception as e:
print("REason: ", e)
print(self.url)
self.error_save_page(self.url)
def torent_front(self, id):
        torrent_f = []  # URL prefixes
for i in range(len(id) - 8):
te = id[i + 8].split("torrent/")[-1]
if "comment" not in te:
temp = "https://rarbgprx.org/download.php?id={}&f=".format(te)
torrent_f.append(temp)
return torrent_f
def torrent_dict(self, title_l, seed, title, address, score):
        # check for duplicates: among entries scored 5.0 or higher, keep the
        # one with the better seed count
        if title in self.tor_dict:
            if int(seed) > int(self.tor_dict[title][0]):
                self.tor_dict[title] = [str(seed), address, str(score)]
        else:
            self.tor_dict[title] = [str(seed), address, str(score)]
def torrent_save(self):
with open(os.getcwd()+'/data/dianying.txt', 'a') as f:
for (i, j) in self.tor_dict.items():
f.write(i)
f.write(", ")
for k in j:
f.write(k)
f.write(", ")
f.write("\n")
def error_save_page(self, url):
with open(os.getcwd()+'/data/error_page_1.txt', 'a') as f:
f.write(url)
f.write( | "\n")
|
|
client.go | package moltencord
import (
"encoding/json"
"io/ioutil"
"os"
"github.com/MoltenCoreDev/moltencord/utils"
)
var baseUrl = "https://discordapp.com/api/v9"
type Client struct {
token string
User User
}
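// NewClient registers the token with the utils package and returns an empty
// client; call Create to fetch the authenticated user from /users/@me.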
func NewClient(token string) *Client {
c := new(Client)
utils.SetToken(token)
return c
}
func (c *Client) Create(token string) error {
c.token = token
utils.SetToken(c.token)
resp, err := utils.MakeRequest("GET", baseUrl+"/users/@me", []byte{})
if err != nil {
return err
}
defer resp.Body.Close()
msg, err := ioutil.ReadAll(resp.Body)
if err != nil {
return err
}
json.Unmarshal(msg, &c.User)
return nil
}
func (c *Client) GetChannel(ID string) (Channel, error) {
var channel Channel
resp, err := utils.MakeRequest("GET", baseUrl+"/channels/"+ID, []byte{})
if err != nil {
return channel, err
}
defer resp.Body.Close()
msg, err := ioutil.ReadAll(resp.Body)
json.Unmarshal(msg, &channel)
return channel, err
}
func (c *Client) GetGuild(ID string) (Guild, error) {
var guild Guild
resp, err := utils.MakeRequest("GET", baseUrl+"/guilds/"+ID, []byte{})
if err != nil {
return guild, err
}
defer resp.Body.Close()
msg, err := ioutil.ReadAll(resp.Body)
json.Unmarshal(msg, &guild)
return guild, err
}
func (c *Client) GetGuilds() ([]Guild, error) {
var guilds []Guild
resp, err := utils.MakeRequest("GET", baseUrl+"/users/@me/guilds", []byte{})
if err != nil {
return guilds, err
}
defer resp.Body.Close()
msg, err := ioutil.ReadAll(resp.Body)
json.Unmarshal(msg, &guilds)
return guilds, err | func (c *Client) Close() {
	os.Exit(0) // Successful exit
} | }
|
lib.rs | // Copyright (c) The Libra Core Contributors
// SPDX-License-Identifier: Apache-2.0
//! This crate provides [`LibraDB`] which represents physical storage of the core Libra data
//! structures.
//!
//! It relays read/write operations on the physical storage via [`schemadb`] to the underlying
//! Key-Value storage system, and implements libra data structures on top of it.
// Used in other crates for testing.
pub mod mock_genesis;
// Used in this and other crates for testing.
#[cfg(any(test, feature = "testing"))]
pub mod test_helper;
pub mod errors;
pub mod schema;
mod change_set;
mod event_store;
mod ledger_counters;
mod ledger_store;
mod pruner;
mod state_store;
mod system_store;
mod transaction_store;
#[cfg(test)]
mod libradb_test;
use crate::{
change_set::{ChangeSet, SealedChangeSet},
errors::LibraDbError,
event_store::EventStore,
ledger_counters::LedgerCounters,
ledger_store::LedgerStore,
pruner::Pruner,
schema::*,
state_store::StateStore,
system_store::SystemStore,
transaction_store::TransactionStore,
};
use crypto::{hash::CryptoHash, HashValue};
use failure::prelude::*;
use itertools::{izip, zip_eq};
use lazy_static::lazy_static;
use logger::prelude::*;
use metrics::OpMetrics;
use nextgen_crypto::ed25519::*;
use schemadb::{ColumnFamilyOptions, ColumnFamilyOptionsMap, DB, DEFAULT_CF_NAME};
use std::{iter::Iterator, path::Path, sync::Arc, time::Instant};
use storage_proto::ExecutorStartupInfo;
use types::{
access_path::AccessPath,
account_address::AccountAddress,
account_config::get_account_resource_or_default,
account_state_blob::{AccountStateBlob, AccountStateWithProof},
contract_event::EventWithProof,
get_with_proof::{RequestItem, ResponseItem},
ledger_info::LedgerInfoWithSignatures,
proof::{AccountStateProof, EventProof, SignedTransactionProof, SparseMerkleProof},
transaction::{
SignedTransactionWithProof, TransactionInfo, TransactionListWithProof, TransactionToCommit,
Version,
},
validator_change::ValidatorChangeEventWithProof,
};
lazy_static! {
static ref OP_COUNTER: OpMetrics = OpMetrics::new_and_registered("storage");
}
const MAX_LIMIT: u64 = 1000;
const MAX_REQUEST_ITEMS: u64 = 100;
fn error_if_too_many_requested(num_requested: u64, max_allowed: u64) -> Result<()> {
if num_requested > max_allowed {
Err(LibraDbError::TooManyRequested(num_requested, max_allowed).into())
} else {
Ok(())
}
}
/// This holds a handle to the underlying DB responsible for physical storage and provides APIs for
/// access to the core Libra data structures.
pub struct LibraDB {
db: Arc<DB>,
ledger_store: LedgerStore,
transaction_store: TransactionStore,
state_store: StateStore,
event_store: EventStore,
system_store: SystemStore,
pruner: Pruner,
}
impl LibraDB {
/// Config parameter for the pruner.
const NUM_HISTORICAL_VERSIONS_TO_KEEP: u64 = 1_000_000;
/// This creates an empty LibraDB instance on disk or opens one if it already exists.
pub fn new<P: AsRef<Path> + Clone>(db_root_path: P) -> Self {
let cf_opts_map: ColumnFamilyOptionsMap = [
(
/* LedgerInfo CF = */ DEFAULT_CF_NAME,
ColumnFamilyOptions::default(),
),
(EVENT_ACCUMULATOR_CF_NAME, ColumnFamilyOptions::default()),
(EVENT_BY_ACCESS_PATH_CF_NAME, ColumnFamilyOptions::default()),
(EVENT_CF_NAME, ColumnFamilyOptions::default()),
(
JELLYFISH_MERKLE_NODE_CF_NAME,
ColumnFamilyOptions::default(),
),
(LEDGER_COUNTERS_CF_NAME, ColumnFamilyOptions::default()),
(STALE_NODE_INDEX_CF_NAME, ColumnFamilyOptions::default()),
(SIGNED_TRANSACTION_CF_NAME, ColumnFamilyOptions::default()),
(
TRANSACTION_ACCUMULATOR_CF_NAME,
ColumnFamilyOptions::default(),
),
(TRANSACTION_INFO_CF_NAME, ColumnFamilyOptions::default()),
(VALIDATOR_CF_NAME, ColumnFamilyOptions::default()),
]
.iter()
.cloned()
.collect();
let path = db_root_path.as_ref().join("libradb");
let instant = Instant::now();
let db = Arc::new(
DB::open(path.clone(), cf_opts_map)
.unwrap_or_else(|e| panic!("LibraDB open failed: {:?}", e)),
);
info!(
"Opened LibraDB at {:?} in {} ms",
path,
instant.elapsed().as_millis()
);
LibraDB {
db: Arc::clone(&db),
event_store: EventStore::new(Arc::clone(&db)),
ledger_store: LedgerStore::new(Arc::clone(&db)),
state_store: StateStore::new(Arc::clone(&db)),
transaction_store: TransactionStore::new(Arc::clone(&db)),
system_store: SystemStore::new(Arc::clone(&db)),
pruner: Pruner::new(Arc::clone(&db), Self::NUM_HISTORICAL_VERSIONS_TO_KEEP),
}
}
// ================================== Public API ==================================
/// Returns the account state corresponding to the given version and account address with proof
/// based on `ledger_version`
fn get_account_state_with_proof(
&self,
address: AccountAddress,
version: Version,
ledger_version: Version,
) -> Result<AccountStateWithProof> {
ensure!(
version <= ledger_version,
"The queried version {} should be equal to or older than ledger version {}.",
version,
ledger_version
);
let latest_version = self.get_latest_version()?;
ensure!(
ledger_version <= latest_version,
"The ledger version {} is greater than the latest version currently in ledger: {}",
ledger_version,
latest_version
);
let (txn_info, txn_info_accumulator_proof) = self
.ledger_store
.get_transaction_info_with_proof(version, ledger_version)?;
let (account_state_blob, sparse_merkle_proof) = self
.state_store
.get_account_state_with_proof_by_version(address, version)?;
Ok(AccountStateWithProof::new(
version,
account_state_blob,
AccountStateProof::new(txn_info_accumulator_proof, txn_info, sparse_merkle_proof),
))
}
/// Returns events specified by `access_path` with sequence number in range designated by
/// `start_seq_num`, `ascending` and `limit`. If ascending is true this query will return up to
/// `limit` events that were emitted after `start_event_seq_num`. Otherwise it will return up to
/// `limit` events in the reverse order. Both cases are inclusive.
fn get_events_by_event_access_path(
&self,
access_path: &AccessPath,
start_seq_num: u64,
ascending: bool,
limit: u64,
ledger_version: Version,
) -> Result<(Vec<EventWithProof>, Option<AccountStateWithProof>)> {
error_if_too_many_requested(limit, MAX_LIMIT)?;
let get_latest = !ascending && start_seq_num == u64::max_value();
let cursor = if get_latest {
// Caller wants the latest, figure out the latest seq_num.
// In the case of no events on that path, use 0 and expect empty result below.
self.event_store
.get_latest_sequence_number(ledger_version, access_path)?
.unwrap_or(0)
} else {
start_seq_num
};
// Convert requested range and order to a range in ascending order.
let (first_seq, real_limit) = get_first_seq_num_and_limit(ascending, cursor, limit)?;
// Query the index.
let mut event_keys = self.event_store.lookup_events_by_access_path(
access_path,
first_seq,
real_limit,
ledger_version,
)?;
// When descending, it's possible that user is asking for something beyond the latest
// sequence number, in which case we will consider it a bad request and return an empty
// list.
// For example, if the latest sequence number is 100, and the caller is asking for 110 to
// 90, we will get 90 to 100 from the index lookup above. Seeing that the last item
// is 100 instead of 110 tells us 110 is out of bound.
if !ascending {
if let Some((seq_num, _, _)) = event_keys.last() {
if *seq_num < cursor {
event_keys = Vec::new();
}
}
}
let mut events_with_proof = event_keys
.into_iter()
.map(|(seq, ver, idx)| {
let (event, event_proof) = self
.event_store
.get_event_with_proof_by_version_and_index(ver, idx)?;
ensure!(
seq == event.sequence_number(),
"Index broken, expected seq:{}, actual:{}",
seq,
event.sequence_number()
);
let (txn_info, txn_info_proof) = self
.ledger_store
.get_transaction_info_with_proof(ver, ledger_version)?;
let proof = EventProof::new(txn_info_proof, txn_info, event_proof);
Ok(EventWithProof::new(ver, idx, event, proof))
})
.collect::<Result<Vec<_>>>()?;
if !ascending {
events_with_proof.reverse();
}
// There are two cases where we need to return proof_of_latest_event to let the caller know
// the latest sequence number:
// 1. The user asks for the latest event by using u64::max() as the cursor, apparently
// he doesn't know the latest sequence number.
// 2. We are going to return less than `real_limit` items. (Two cases can lead to that:
// a. the cursor is beyond the latest sequence number; b. in ascending order we don't have
// enough items to return because the latest sequence number is hit). In this case we
// need to return the proof to convince the caller we didn't hide any item from him. Note
// that we use `real_limit` instead of `limit` here because it takes into account the case
// of hitting 0 in descending order, which is valid and doesn't require the proof.
let proof_of_latest_event = if get_latest || events_with_proof.len() < real_limit as usize {
Some(self.get_account_state_with_proof(
access_path.address,
ledger_version,
ledger_version,
)?)
} else {
None
};
Ok((events_with_proof, proof_of_latest_event))
}
/// Returns a signed transaction that is the `seq_num`-th one associated with the given account.
/// If the signed transaction with given `seq_num` doesn't exist, returns `None`.
    // TODO(gzh): Use binary search for now. We may create a seq_num index in the future.
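    // Illustration (hypothetical history): if the account's sequence number per
    // version is [0, 0, 1, 2, 2, 3], the txn with seq_num 1 was committed at the
    // first version whose resulting state shows seq_num 2 -- the binary search
    // below finds exactly that version (version 3 here).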
fn get_txn_by_account_and_seq(
&self,
address: AccountAddress,
seq_num: u64,
ledger_version: Version,
fetch_events: bool,
) -> Result<Option<SignedTransactionWithProof>> {
// If txn with seq_num n is at some version, the corresponding account state at the
// same version will be the first account state that has seq_num n + 1.
let seq_num = seq_num + 1;
let (mut start_version, mut end_version) = (0, ledger_version);
while start_version < end_version {
let mid_version = start_version + (end_version - start_version) / 2;
let account_seq_num = self.get_account_seq_num_by_version(address, mid_version)?;
if account_seq_num >= seq_num {
end_version = mid_version;
} else {
start_version = mid_version + 1;
}
}
assert_eq!(start_version, end_version);
let seq_num_found = self.get_account_seq_num_by_version(address, start_version)?;
if seq_num_found < seq_num {
return Ok(None);
} else if seq_num_found > seq_num {
// log error
bail!("internal error: seq_num is not continuous.")
}
// start_version cannot be 0 (genesis version).
assert_eq!(
self.get_account_seq_num_by_version(address, start_version - 1)?,
seq_num_found - 1
);
self.get_transaction_with_proof(start_version, ledger_version, fetch_events)
.map(Some)
}
/// Gets the latest version number available in the ledger.
fn get_latest_version(&self) -> Result<Version> {
Ok(self
.ledger_store
.get_latest_ledger_info()?
.ledger_info()
.version())
}
/// Persist transactions. Called by the executor module when either syncing nodes or committing
/// blocks during normal operation.
///
/// `first_version` is the version of the first transaction in `txns_to_commit`.
/// When `ledger_info_with_sigs` is provided, verify that the transaction accumulator root hash
/// it carries is generated after the `txns_to_commit` are applied.
    /// Note that even if `txns_to_commit` is empty, `first_version` is checked to be
/// `ledger_info_with_sigs.ledger_info.version + 1` if `ledger_info_with_sigs` is not `None`.
pub fn save_transactions(
&self,
txns_to_commit: &[TransactionToCommit],
first_version: Version,
ledger_info_with_sigs: &Option<LedgerInfoWithSignatures<Ed25519Signature>>,
) -> Result<()> {
let num_txns = txns_to_commit.len() as u64;
// ledger_info_with_sigs could be None if we are doing state synchronization. In this case
// txns_to_commit should not be empty. Otherwise it is okay to commit empty blocks.
ensure!(
ledger_info_with_sigs.is_some() || num_txns > 0,
"txns_to_commit is empty while ledger_info_with_sigs is None.",
);
if let Some(x) = ledger_info_with_sigs {
let claimed_last_version = x.ledger_info().version();
ensure!(
claimed_last_version + 1 == first_version + num_txns,
"Transaction batch not applicable: first_version {}, num_txns {}, last_version {}",
first_version,
num_txns,
claimed_last_version,
);
}
// Gather db mutations to `batch`.
let mut cs = ChangeSet::new();
let new_root_hash = self.save_transactions_impl(txns_to_commit, first_version, &mut cs)?;
// If expected ledger info is provided, verify result root hash and save the ledger info.
if let Some(x) = ledger_info_with_sigs {
let expected_root_hash = x.ledger_info().transaction_accumulator_hash();
ensure!(
new_root_hash == expected_root_hash,
"Root hash calculated doesn't match expected. {:?} vs {:?}",
new_root_hash,
expected_root_hash,
);
self.ledger_store.put_ledger_info(x, &mut cs)?;
}
// Persist.
let (sealed_cs, counters) = self.seal_change_set(first_version, num_txns, cs)?;
self.commit(sealed_cs)?;
// Only increment counter if commit succeeds and there are at least one transaction written
// to the storage. That's also when we'd inform the pruner thread to work.
if num_txns > 0 {
let last_version = first_version + num_txns - 1;
OP_COUNTER.inc_by("committed_txns", num_txns as usize);
OP_COUNTER.set("latest_transaction_version", last_version as usize);
counters
.expect("Counters should be bumped with transactions being saved.")
.bump_op_counters();
self.pruner.wake(last_version);
}
Ok(())
}
fn save_transactions_impl(
&self,
txns_to_commit: &[TransactionToCommit],
first_version: u64,
mut cs: &mut ChangeSet,
) -> Result<HashValue> {
let last_version = first_version + txns_to_commit.len() as u64 - 1;
// Account state updates. Gather account state root hashes
let account_state_sets = txns_to_commit
.iter()
.map(|txn_to_commit| txn_to_commit.account_states().clone())
.collect::<Vec<_>>();
let state_root_hashes =
self.state_store
.put_account_state_sets(account_state_sets, first_version, &mut cs)?;
// Event updates. Gather event accumulator root hashes.
let event_root_hashes = zip_eq(first_version..=last_version, txns_to_commit)
.map(|(ver, txn_to_commit)| {
self.event_store
.put_events(ver, txn_to_commit.events(), &mut cs)
})
.collect::<Result<Vec<_>>>()?;
// Transaction updates. Gather transaction hashes.
zip_eq(first_version..=last_version, txns_to_commit)
.map(|(ver, txn_to_commit)| {
self.transaction_store
.put_transaction(ver, txn_to_commit.signed_txn(), &mut cs)
})
.collect::<Result<()>>()?;
let txn_hashes = txns_to_commit
.iter()
.map(|txn_to_commit| txn_to_commit.signed_txn().hash())
.collect::<Vec<_>>();
let gas_amounts = txns_to_commit
.iter()
.map(TransactionToCommit::gas_used)
.collect::<Vec<_>>();
// Transaction accumulator updates. Get result root hash.
let txn_infos = izip!(
txn_hashes,
state_root_hashes,
event_root_hashes,
gas_amounts
)
.map(|(t, s, e, g)| TransactionInfo::new(t, s, e, g))
.collect::<Vec<_>>();
assert_eq!(txn_infos.len(), txns_to_commit.len());
let new_root_hash =
self.ledger_store
.put_transaction_infos(first_version, &txn_infos, &mut cs)?;
Ok(new_root_hash)
}
/// This backs the `UpdateToLatestLedger` public read API which returns the latest
/// [`LedgerInfoWithSignatures`] together with items requested and proofs relative to the same
/// ledger info.
pub fn update_to_latest_ledger(
&self,
_client_known_version: u64,
request_items: Vec<RequestItem>,
) -> Result<(
Vec<ResponseItem>,
LedgerInfoWithSignatures<Ed25519Signature>,
Vec<ValidatorChangeEventWithProof<Ed25519Signature>>,
)> {
error_if_too_many_requested(request_items.len() as u64, MAX_REQUEST_ITEMS)?;
// Get the latest ledger info and signatures
let ledger_info_with_sigs = self.ledger_store.get_latest_ledger_info()?;
let ledger_version = ledger_info_with_sigs.ledger_info().version();
// Fulfill all request items
let response_items = request_items
.into_iter()
.map(|request_item| match request_item {
RequestItem::GetAccountState { address } => Ok(ResponseItem::GetAccountState {
account_state_with_proof: self.get_account_state_with_proof(
address,
ledger_version,
ledger_version,
)?,
}),
RequestItem::GetAccountTransactionBySequenceNumber {
account,
sequence_number,
fetch_events,
} => {
let signed_transaction_with_proof = self.get_txn_by_account_and_seq(
account,
sequence_number,
ledger_version,
fetch_events,
)?;
let proof_of_current_sequence_number = match signed_transaction_with_proof {
Some(_) => None,
None => Some(self.get_account_state_with_proof(
account,
ledger_version,
ledger_version,
)?),
};
Ok(ResponseItem::GetAccountTransactionBySequenceNumber {
signed_transaction_with_proof,
proof_of_current_sequence_number,
})
}
RequestItem::GetEventsByEventAccessPath {
access_path,
start_event_seq_num,
ascending,
limit,
} => {
let (events_with_proof, proof_of_latest_event) = self
.get_events_by_event_access_path(
&access_path,
start_event_seq_num,
ascending,
limit,
ledger_version,
)?;
Ok(ResponseItem::GetEventsByEventAccessPath {
events_with_proof,
proof_of_latest_event,
})
}
RequestItem::GetTransactions {
start_version,
limit,
fetch_events,
} => {
let txn_list_with_proof =
self.get_transactions(start_version, limit, ledger_version, fetch_events)?;
Ok(ResponseItem::GetTransactions {
txn_list_with_proof,
})
}
})
.collect::<Result<Vec<_>>>()?;
Ok((
response_items,
ledger_info_with_sigs,
vec![], /* TODO: validator_change_events */
))
}
// =========================== Execution Internal APIs ========================================
/// Gets an account state by account address, out of the ledger state indicated by the state
/// Merkle tree root hash.
///
/// This is used by the executor module internally.
pub fn get_account_state_with_proof_by_version(
&self,
address: AccountAddress,
version: Version,
) -> Result<(Option<AccountStateBlob>, SparseMerkleProof)> |
/// Gets information needed from storage during the startup of the executor module.
///
/// This is used by the executor module internally.
pub fn get_executor_startup_info(&self) -> Result<Option<ExecutorStartupInfo>> {
// Get the latest ledger info. Return None if not bootstrapped.
let ledger_info_with_sigs = match self.ledger_store.get_latest_ledger_info_option()? {
Some(x) => x,
None => return Ok(None),
};
let ledger_info = ledger_info_with_sigs.ledger_info().clone();
let (latest_version, txn_info) = self.ledger_store.get_latest_transaction_info()?;
let account_state_root_hash = txn_info.state_root_hash();
let ledger_frozen_subtree_hashes = self
.ledger_store
.get_ledger_frozen_subtree_hashes(latest_version)?;
Ok(Some(ExecutorStartupInfo {
ledger_info,
latest_version,
account_state_root_hash,
ledger_frozen_subtree_hashes,
}))
}
// ======================= State Synchronizer Internal APIs ===================================
/// Gets a batch of transactions for the purpose of synchronizing state to another node.
///
/// This is used by the State Synchronizer module internally.
pub fn get_transactions(
&self,
start_version: Version,
limit: u64,
ledger_version: Version,
fetch_events: bool,
) -> Result<TransactionListWithProof> {
error_if_too_many_requested(limit, MAX_LIMIT)?;
if start_version > ledger_version || limit == 0 {
return Ok(TransactionListWithProof::new_empty());
}
let limit = std::cmp::min(limit, ledger_version - start_version + 1);
let txn_and_txn_info_list = (start_version..start_version + limit)
.into_iter()
.map(|version| {
Ok((
self.transaction_store.get_transaction(version)?,
self.ledger_store.get_transaction_info(version)?,
))
})
.collect::<Result<Vec<_>>>()?;
let proof_of_first_transaction = Some(
self.ledger_store
.get_transaction_proof(start_version, ledger_version)?,
);
let proof_of_last_transaction = if limit == 1 {
None
} else {
Some(
self.ledger_store
.get_transaction_proof(start_version + limit - 1, ledger_version)?,
)
};
let events = if fetch_events {
Some(
(start_version..start_version + limit)
.into_iter()
.map(|version| Ok(self.event_store.get_events_by_version(version)?))
.collect::<Result<Vec<_>>>()?,
)
} else {
None
};
Ok(TransactionListWithProof::new(
txn_and_txn_info_list,
events,
Some(start_version),
proof_of_first_transaction,
proof_of_last_transaction,
))
}
// ================================== Private APIs ==================================
/// Convert a `ChangeSet` to `SealedChangeSet`.
///
/// Specifically, counter increases are added to current counter values and converted to DB
/// alternations.
fn seal_change_set(
&self,
first_version: Version,
num_txns: Version,
mut cs: ChangeSet,
) -> Result<(SealedChangeSet, Option<LedgerCounters>)> {
// Avoid reading base counter values when not necessary.
let counters = if num_txns > 0 {
Some(self.system_store.bump_ledger_counters(
first_version,
first_version + num_txns - 1,
cs.counter_bumps,
&mut cs.batch,
)?)
} else {
None
};
Ok((SealedChangeSet { batch: cs.batch }, counters))
}
/// Write the whole schema batch including all data necessary to mutate the ledger
/// state of some transaction by leveraging rocksdb atomicity support. Also committed are the
/// LedgerCounters.
fn commit(&self, sealed_cs: SealedChangeSet) -> Result<()> {
self.db.write_schemas(sealed_cs.batch)?;
match self.db.get_approximate_sizes_cf() {
Ok(cf_sizes) => {
for (cf_name, size) in cf_sizes {
OP_COUNTER.set(&format!("cf_size_bytes_{}", cf_name), size as usize);
}
}
Err(err) => warn!(
"Failed to get approximate size of column families: {}.",
err
),
}
Ok(())
}
fn get_account_seq_num_by_version(
&self,
address: AccountAddress,
version: Version,
) -> Result<u64> {
let (account_state_blob, _proof) = self
.state_store
.get_account_state_with_proof_by_version(address, version)?;
// If an account does not exist, we treat it as if it has sequence number 0.
Ok(get_account_resource_or_default(&account_state_blob)?.sequence_number())
}
fn get_transaction_with_proof(
&self,
version: Version,
ledger_version: Version,
fetch_events: bool,
) -> Result<SignedTransactionWithProof> {
let proof = {
let (txn_info, txn_info_accumulator_proof) = self
.ledger_store
.get_transaction_info_with_proof(version, ledger_version)?;
SignedTransactionProof::new(txn_info_accumulator_proof, txn_info)
};
let signed_transaction = self.transaction_store.get_transaction(version)?;
// If events were requested, also fetch those.
let events = if fetch_events {
Some(self.event_store.get_events_by_version(version)?)
} else {
None
};
Ok(SignedTransactionWithProof {
version,
signed_transaction,
events,
proof,
})
}
}
// Convert requested range and order to a range in ascending order.
fn get_first_seq_num_and_limit(ascending: bool, cursor: u64, limit: u64) -> Result<(u64, u64)> {
    ensure!(limit > 0, "limit should be > 0, got {}", limit);
Ok(if ascending {
(cursor, limit)
} else if limit <= cursor {
(cursor - limit + 1, limit)
} else {
(0, cursor + 1)
})
}
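// A minimal test sketch for the range conversion above; the values are chosen
// for illustration only and assume the standard Rust test harness.
#[cfg(test)]
mod range_conversion_tests {
    use super::get_first_seq_num_and_limit;

    #[test]
    fn converts_requested_ranges() {
        // Ascending: the cursor is already the first sequence number.
        assert_eq!(get_first_seq_num_and_limit(true, 100, 10).unwrap(), (100, 10));
        // Descending with enough room below the cursor.
        assert_eq!(get_first_seq_num_and_limit(false, 100, 10).unwrap(), (91, 10));
        // Descending hitting sequence number 0: the effective limit is clamped.
        assert_eq!(get_first_seq_num_and_limit(false, 5, 10).unwrap(), (0, 6));
    }
}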
| {
self.state_store
.get_account_state_with_proof_by_version(address, version)
} |
args.rs | use ic_config::ConfigSource;
use ic_types::ReplicaVersion;
use std::convert::TryFrom;
use std::path::PathBuf;
use structopt::StructOpt;
#[derive(Debug, StructOpt)] | #[structopt(
name = "replica",
about = "Arguments for the Internet Computer Replica."
)]
pub struct ReplicaArgs {
/// Print a sample config if set
#[structopt(long)]
pub print_sample_config: bool,
/// The path to the Replica config file
#[structopt(long, parse(from_os_str))]
pub config_file: Option<PathBuf>,
/// A string representation of the Replica config
#[structopt(long)]
pub config_literal: Option<String>,
/// A path to a CBOR-encoded catch-up package to seed the Replica with
#[structopt(long, parse(from_os_str))]
pub catch_up_package: Option<PathBuf>,
/// The version of the Replica being run
#[structopt(long, parse(try_from_str = ReplicaVersion::try_from))]
pub replica_version: ReplicaVersion,
/// A path to the PEM file with the public key of the NNS subnet.
/// It's used to certify responses of the registry canister.
#[structopt(long, parse(from_os_str))]
pub nns_public_key_file: Option<PathBuf>,
    /// Force the use of the given subnet ID. This is needed to upgrade NNS
    /// replicas. In that case, we already know which subnet ID we should be
    /// booting with, and trying to determine it from the registry would fail.
    /// Example subnet ID: ak2jc-de3ae-aaaaa-aaaap-yai
#[structopt(long)]
pub force_subnet: Option<String>,
}
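// A minimal wiring sketch (hypothetical call site; error handling omitted):
//
//     let args = ReplicaArgs::from_args(); // provided by the StructOpt derive
//     let config_source = ConfigSource::from(&args);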
impl From<&ReplicaArgs> for ConfigSource {
fn from(args: &ReplicaArgs) -> ConfigSource {
if let Some(path) = &args.config_file {
ConfigSource::File(path.clone())
} else if let Some(literal) = &args.config_literal {
ConfigSource::Literal(literal.clone())
} else {
ConfigSource::Default
}
}
} | |
start.go | package main
import (
"fmt"
"os"
extension "code.cloudfoundry.org/eirini-ssh/extension"
"code.cloudfoundry.org/eirini-ssh/version"
eirinix "code.cloudfoundry.org/eirinix"
"github.com/spf13/cobra"
"github.com/spf13/viper"
_ "k8s.io/client-go/plugin/pkg/client/auth/oidc" // from https://github.com/kubernetes/client-go/issues/345
)
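// A hypothetical invocation sketch: the binary name and values are placeholders,
// while the flag and env names come from the bindings registered below.
//
//	OPERATOR_WEBHOOK_HOST=10.0.0.1 OPERATOR_WEBHOOK_PORT=2999 \
//	eirini-ssh-extension start --namespace eirini --register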
var startCmd = &cobra.Command{
Use: "start",
Short: "Start the eirini extension",
PreRun: func(cmd *cobra.Command, args []string) {
viper.BindPFlag("kubeconfig", cmd.Flags().Lookup("kubeconfig"))
viper.BindPFlag("namespace", cmd.Flags().Lookup("namespace"))
viper.BindPFlag("operator-webhook-host", cmd.Flags().Lookup("operator-webhook-host"))
viper.BindPFlag("operator-webhook-port", cmd.Flags().Lookup("operator-webhook-port"))
viper.BindPFlag("operator-service-name", cmd.Flags().Lookup("operator-service-name"))
viper.BindPFlag("operator-webhook-namespace", cmd.Flags().Lookup("operator-webhook-namespace"))
viper.BindPFlag("register", cmd.Flags().Lookup("register"))
viper.BindEnv("kubeconfig")
viper.BindEnv("namespace", "NAMESPACE")
viper.BindEnv("operator-webhook-host", "OPERATOR_WEBHOOK_HOST")
viper.BindEnv("operator-webhook-port", "OPERATOR_WEBHOOK_PORT")
viper.BindEnv("operator-service-name", "OPERATOR_SERVICE_NAME") | viper.BindEnv("register", "EIRINI_EXTENSION_REGISTER")
},
Run: func(cmd *cobra.Command, args []string) {
defer log.Sync()
kubeConfig := viper.GetString("kubeconfig")
namespace := viper.GetString("namespace")
log.Infof("Starting %s with namespace %s", version.Version, namespace)
webhookHost := viper.GetString("operator-webhook-host")
webhookPort := viper.GetInt32("operator-webhook-port")
serviceName := viper.GetString("operator-service-name")
webhookNamespace := viper.GetString("operator-webhook-namespace")
register := viper.GetBool("register")
if webhookHost == "" {
log.Fatal("required flag 'operator-webhook-host' not set (env variable: OPERATOR_WEBHOOK_HOST)")
}
RegisterWebhooks := true
if !register {
log.Info("The extension will start without registering")
RegisterWebhooks = false
}
filterEiriniApps := true
x := eirinix.NewManager(
eirinix.ManagerOptions{
FilterEiriniApps: &filterEiriniApps,
OperatorFingerprint: "eirini-ssh",
Namespace: namespace,
Host: webhookHost,
Port: webhookPort,
KubeConfig: kubeConfig,
ServiceName: serviceName,
WebhookNamespace: webhookNamespace,
RegisterWebHook: &RegisterWebhooks,
})
x.AddExtension(&extension.SSH{Namespace: namespace})
x.AddExtension(&extension.CleanupWatcher{})
if err := x.Start(); err != nil {
fmt.Println(err.Error())
os.Exit(1)
}
},
}
func init() {
startCmd.Flags().BoolP("register", "r", true, "Register the extension")
rootCmd.AddCommand(startCmd)
} | viper.BindEnv("operator-webhook-namespace", "OPERATOR_WEBHOOK_NAMESPACE") |
models.py | import cgi
import datetime
import email.message
import json as jsonlib
import typing
import urllib.request
from collections.abc import MutableMapping
from http.cookiejar import Cookie, CookieJar
from urllib.parse import parse_qsl, urlencode
import chardet
import rfc3986
from .config import USER_AGENT
from .decoders import (
ACCEPT_ENCODING,
SUPPORTED_DECODERS,
Decoder,
IdentityDecoder,
MultiDecoder,
TextDecoder,
)
from .exceptions import (
CookieConflict,
HTTPError,
InvalidURL,
NotRedirectResponse,
ResponseClosed,
ResponseNotRead,
StreamConsumed,
)
from .multipart import multipart_encode
from .status_codes import StatusCode
from .utils import (
guess_json_utf,
is_known_encoding,
normalize_header_key,
normalize_header_value,
obfuscate_sensitive_headers,
parse_header_links,
str_query_param,
)
if typing.TYPE_CHECKING: # pragma: no cover
from .middleware.base import BaseMiddleware # noqa: F401
from .dispatch.base import AsyncDispatcher # noqa: F401
PrimitiveData = typing.Optional[typing.Union[str, int, float, bool]]
URLTypes = typing.Union["URL", str]
QueryParamTypes = typing.Union[
"QueryParams",
typing.Mapping[str, PrimitiveData],
typing.List[typing.Tuple[str, PrimitiveData]],
str,
]
HeaderTypes = typing.Union[
"Headers",
typing.Dict[typing.AnyStr, typing.AnyStr],
typing.List[typing.Tuple[typing.AnyStr, typing.AnyStr]],
]
CookieTypes = typing.Union["Cookies", CookieJar, typing.Dict[str, str]]
AuthTypes = typing.Union[
typing.Tuple[typing.Union[str, bytes], typing.Union[str, bytes]],
typing.Callable[["AsyncRequest"], "AsyncRequest"],
"BaseMiddleware",
]
ProxiesTypes = typing.Union[
URLTypes,
"AsyncDispatcher",
typing.Dict[URLTypes, typing.Union[URLTypes, "AsyncDispatcher"]],
]
AsyncRequestData = typing.Union[dict, str, bytes, typing.AsyncIterator[bytes]]
RequestData = typing.Union[dict, str, bytes, typing.Iterator[bytes]]
RequestFiles = typing.Dict[
str,
typing.Union[
typing.IO[typing.AnyStr], # file
typing.Tuple[str, typing.IO[typing.AnyStr]], # (filename, file)
typing.Tuple[
str, typing.IO[typing.AnyStr], str
], # (filename, file, content_type)
],
]
AsyncResponseContent = typing.Union[bytes, typing.AsyncIterator[bytes]]
ResponseContent = typing.Union[bytes, typing.Iterator[bytes]]
class URL:
def __init__(
self,
url: URLTypes,
allow_relative: bool = False,
params: QueryParamTypes = None,
) -> None:
if isinstance(url, str):
self._uri_reference = rfc3986.api.iri_reference(url).encode()
else:
self._uri_reference = url._uri_reference
# Normalize scheme and domain name.
if self.is_absolute_url:
self._uri_reference = self._uri_reference.normalize()
# Add any query parameters.
if params:
query_string = str(QueryParams(params))
self._uri_reference = self._uri_reference.copy_with(query=query_string)
# Enforce absolute URLs by default.
if not allow_relative:
if not self.scheme:
raise InvalidURL("No scheme included in URL.")
if not self.host:
raise InvalidURL("No host included in URL.")
# Allow setting full_path to custom attributes requests
# like OPTIONS, CONNECT, and forwarding proxy requests.
self._full_path: typing.Optional[str] = None
@property
def scheme(self) -> str:
return self._uri_reference.scheme or ""
@property
def authority(self) -> str:
return self._uri_reference.authority or ""
@property
def userinfo(self) -> str:
|
@property
def username(self) -> str:
userinfo = self._uri_reference.userinfo or ""
return userinfo.partition(":")[0]
@property
def password(self) -> str:
userinfo = self._uri_reference.userinfo or ""
return userinfo.partition(":")[2]
@property
def host(self) -> str:
return self._uri_reference.host or ""
@property
def port(self) -> int:
port = self._uri_reference.port
if port is None:
return {"https": 443, "http": 80}[self.scheme]
return int(port)
@property
def path(self) -> str:
return self._uri_reference.path or "/"
@property
def query(self) -> str:
return self._uri_reference.query or ""
@property
def full_path(self) -> str:
if self._full_path is not None:
return self._full_path
path = self.path
if self.query:
path += "?" + self.query
return path
@full_path.setter
def full_path(self, value: typing.Optional[str]) -> None:
self._full_path = value
@property
def fragment(self) -> str:
return self._uri_reference.fragment or ""
@property
def is_ssl(self) -> bool:
return self.scheme == "https"
@property
def is_absolute_url(self) -> bool:
"""
Return `True` for absolute URLs such as 'http://example.com/path',
and `False` for relative URLs such as '/path'.
"""
# We don't use `.is_absolute` from `rfc3986` because it treats
# URLs with a fragment portion as not absolute.
# What we actually care about is if the URL provides
# a scheme and hostname to which connections should be made.
return bool(self.scheme and self.host)
@property
def is_relative_url(self) -> bool:
return not self.is_absolute_url
@property
def origin(self) -> "Origin":
return Origin(self)
def copy_with(self, **kwargs: typing.Any) -> "URL":
if (
"username" in kwargs
or "password" in kwargs
or "host" in kwargs
or "port" in kwargs
):
host = kwargs.pop("host", self.host)
port = kwargs.pop("port", self.port)
username = kwargs.pop("username", self.username)
password = kwargs.pop("password", self.password)
authority = host
if port is not None:
authority += f":{port}"
if username is not None:
userpass = username
if password is not None:
userpass += f":{password}"
authority = f"{userpass}@{authority}"
kwargs["authority"] = authority
return URL(self._uri_reference.copy_with(**kwargs).unsplit())
def join(self, relative_url: URLTypes) -> "URL":
"""
Return an absolute URL, using given this URL as the base.
"""
if self.is_relative_url:
return URL(relative_url)
# We drop any fragment portion, because RFC 3986 strictly
# treats URLs with a fragment portion as not being absolute URLs.
base_uri = self._uri_reference.copy_with(fragment=None)
relative_url = URL(relative_url, allow_relative=True)
return URL(relative_url._uri_reference.resolve_with(base_uri).unsplit())
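    # A small illustration of the RFC 3986 resolution above (hypothetical URLs):
    #   URL("https://example.com/a/b").join("../c") == URL("https://example.com/c")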
def __hash__(self) -> int:
return hash(str(self))
def __eq__(self, other: typing.Any) -> bool:
return isinstance(other, (URL, str)) and str(self) == str(other)
def __str__(self) -> str:
return self._uri_reference.unsplit()
def __repr__(self) -> str:
class_name = self.__class__.__name__
url_str = str(self)
if self._uri_reference.userinfo:
url_str = (
rfc3986.urlparse(url_str)
.copy_with(userinfo=f"{self.username}:[secure]")
.unsplit()
)
return f"{class_name}({url_str!r})"
class Origin:
"""
The URL scheme and authority information, as a comparable, hashable object.
"""
def __init__(self, url: URLTypes) -> None:
if not isinstance(url, URL):
url = URL(url)
self.scheme = url.scheme
self.is_ssl = url.is_ssl
self.host = url.host
self.port = url.port
def __eq__(self, other: typing.Any) -> bool:
return (
isinstance(other, self.__class__)
and self.scheme == other.scheme
and self.host == other.host
and self.port == other.port
)
def __hash__(self) -> int:
return hash((self.scheme, self.host, self.port))
def __repr__(self) -> str:
class_name = self.__class__.__name__
return (
f"{class_name}(scheme={self.scheme!r} host={self.host!r} port={self.port})"
)
class QueryParams(typing.Mapping[str, str]):
"""
URL query parameters, as a multi-dict.
"""
def __init__(self, *args: QueryParamTypes, **kwargs: typing.Any) -> None:
assert len(args) < 2, "Too many arguments."
assert not (args and kwargs), "Cannot mix named and unnamed arguments."
value = args[0] if args else kwargs
if isinstance(value, str):
items = parse_qsl(value)
elif isinstance(value, QueryParams):
items = value.multi_items()
elif isinstance(value, list):
items = value # type: ignore
else:
items = value.items() # type: ignore
self._list = [(str(k), str_query_param(v)) for k, v in items]
self._dict = {str(k): str_query_param(v) for k, v in items}
def getlist(self, key: typing.Any) -> typing.List[str]:
return [item_value for item_key, item_value in self._list if item_key == key]
def keys(self) -> typing.KeysView:
return self._dict.keys()
def values(self) -> typing.ValuesView:
return self._dict.values()
def items(self) -> typing.ItemsView:
return self._dict.items()
def multi_items(self) -> typing.List[typing.Tuple[str, str]]:
return list(self._list)
def get(self, key: typing.Any, default: typing.Any = None) -> typing.Any:
if key in self._dict:
return self._dict[key]
return default
def update(self, params: QueryParamTypes = None) -> None: # type: ignore
if not params:
return
params = QueryParams(params)
for param in params:
self[param] = params[param]
def __getitem__(self, key: typing.Any) -> str:
return self._dict[key]
def __setitem__(self, key: str, value: str) -> None:
self._dict[key] = value
found_indexes = []
for idx, (item_key, _) in enumerate(self._list):
if item_key == key:
found_indexes.append(idx)
for idx in reversed(found_indexes[1:]):
del self._list[idx]
if found_indexes:
idx = found_indexes[0]
self._list[idx] = (key, value)
else:
self._list.append((key, value))
def __contains__(self, key: typing.Any) -> bool:
return key in self._dict
def __iter__(self) -> typing.Iterator[typing.Any]:
return iter(self.keys())
def __len__(self) -> int:
return len(self._dict)
def __eq__(self, other: typing.Any) -> bool:
if not isinstance(other, self.__class__):
return False
return sorted(self._list) == sorted(other._list)
def __str__(self) -> str:
return urlencode(self._list)
def __repr__(self) -> str:
class_name = self.__class__.__name__
query_string = str(self)
return f"{class_name}({query_string!r})"
class Headers(typing.MutableMapping[str, str]):
"""
HTTP headers, as a case-insensitive multi-dict.
"""
def __init__(self, headers: HeaderTypes = None, encoding: str = None) -> None:
if headers is None:
self._list = [] # type: typing.List[typing.Tuple[bytes, bytes]]
elif isinstance(headers, Headers):
self._list = list(headers.raw)
elif isinstance(headers, dict):
self._list = [
(normalize_header_key(k, encoding), normalize_header_value(v, encoding))
for k, v in headers.items()
]
else:
self._list = [
(normalize_header_key(k, encoding), normalize_header_value(v, encoding))
for k, v in headers
]
self._encoding = encoding
@property
def encoding(self) -> str:
"""
Header encoding is mandated as ascii, but we allow fallbacks to utf-8
or iso-8859-1.
"""
if self._encoding is None:
for encoding in ["ascii", "utf-8"]:
for key, value in self.raw:
try:
key.decode(encoding)
value.decode(encoding)
except UnicodeDecodeError:
break
else:
# The else block runs if 'break' did not occur, meaning
# all values fitted the encoding.
self._encoding = encoding
break
else:
# The ISO-8859-1 encoding covers all 256 code points in a byte,
# so will never raise decode errors.
self._encoding = "iso-8859-1"
return self._encoding
@encoding.setter
def encoding(self, value: str) -> None:
self._encoding = value
@property
def raw(self) -> typing.List[typing.Tuple[bytes, bytes]]:
"""
Returns a list of the raw header items, as byte pairs.
May be mutated in-place.
"""
return self._list
def keys(self) -> typing.List[str]: # type: ignore
return [key.decode(self.encoding) for key, value in self._list]
def values(self) -> typing.List[str]: # type: ignore
return [value.decode(self.encoding) for key, value in self._list]
def items(self) -> typing.List[typing.Tuple[str, str]]: # type: ignore
return [
(key.decode(self.encoding), value.decode(self.encoding))
for key, value in self._list
]
def get(self, key: str, default: typing.Any = None) -> typing.Any:
try:
return self[key]
except KeyError:
return default
def getlist(self, key: str, split_commas: bool = False) -> typing.List[str]:
"""
Return multiple header values.
"""
get_header_key = key.lower().encode(self.encoding)
values = [
item_value.decode(self.encoding)
for item_key, item_value in self._list
if item_key == get_header_key
]
if not split_commas:
return values
split_values = []
for value in values:
split_values.extend([item.strip() for item in value.split(",")])
return split_values
def update(self, headers: HeaderTypes = None) -> None: # type: ignore
headers = Headers(headers)
for header in headers:
self[header] = headers[header]
def copy(self) -> "Headers":
return Headers(self.items(), encoding=self.encoding)
def __getitem__(self, key: str) -> str:
"""
Return a single header value.
If there are multiple headers with the same key, then we concatenate
them with commas. See: https://tools.ietf.org/html/rfc7230#section-3.2.2
"""
normalized_key = key.lower().encode(self.encoding)
items = []
for header_key, header_value in self._list:
if header_key == normalized_key:
items.append(header_value.decode(self.encoding))
if items:
return ", ".join(items)
raise KeyError(key)
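    # For example (hypothetical values): with raw headers
    # [(b"set-cookie", b"a=1"), (b"set-cookie", b"b=2")],
    # headers["set-cookie"] returns "a=1, b=2".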
def __setitem__(self, key: str, value: str) -> None:
"""
Set the header `key` to `value`, removing any duplicate entries.
Retains insertion order.
"""
set_key = key.lower().encode(self.encoding)
set_value = value.encode(self.encoding)
found_indexes = []
for idx, (item_key, _) in enumerate(self._list):
if item_key == set_key:
found_indexes.append(idx)
for idx in reversed(found_indexes[1:]):
del self._list[idx]
if found_indexes:
idx = found_indexes[0]
self._list[idx] = (set_key, set_value)
else:
self._list.append((set_key, set_value))
def __delitem__(self, key: str) -> None:
"""
Remove the header `key`.
"""
del_key = key.lower().encode(self.encoding)
pop_indexes = []
for idx, (item_key, _) in enumerate(self._list):
if item_key == del_key:
pop_indexes.append(idx)
if not pop_indexes:
raise KeyError(key)
for idx in reversed(pop_indexes):
del self._list[idx]
def __contains__(self, key: typing.Any) -> bool:
get_header_key = key.lower().encode(self.encoding)
for header_key, _ in self._list:
if header_key == get_header_key:
return True
return False
def __iter__(self) -> typing.Iterator[typing.Any]:
return iter(self.keys())
def __len__(self) -> int:
return len(self._list)
def __eq__(self, other: typing.Any) -> bool:
if not isinstance(other, Headers):
return False
return sorted(self._list) == sorted(other._list)
def __repr__(self) -> str:
class_name = self.__class__.__name__
encoding_str = ""
if self.encoding != "ascii":
encoding_str = f", encoding={self.encoding!r}"
as_list = list(obfuscate_sensitive_headers(self.items()))
as_dict = dict(as_list)
no_duplicate_keys = len(as_dict) == len(as_list)
if no_duplicate_keys:
return f"{class_name}({as_dict!r}{encoding_str})"
return f"{class_name}({as_list!r}{encoding_str})"
class BaseRequest:
def __init__(
self,
method: str,
url: typing.Union[str, URL],
*,
params: QueryParamTypes = None,
headers: HeaderTypes = None,
cookies: CookieTypes = None,
):
self.method = method.upper()
self.url = URL(url, params=params)
self.headers = Headers(headers)
if cookies:
self._cookies = Cookies(cookies)
self._cookies.set_cookie_header(self)
def encode_data(
self, data: dict = None, files: RequestFiles = None, json: typing.Any = None
) -> typing.Tuple[bytes, str]:
if json is not None:
content = jsonlib.dumps(json).encode("utf-8")
content_type = "application/json"
elif files is not None:
content, content_type = multipart_encode(data or {}, files)
elif data is not None:
content = urlencode(data, doseq=True).encode("utf-8")
content_type = "application/x-www-form-urlencoded"
else:
content = b""
content_type = ""
return content, content_type
def prepare(self) -> None:
content: typing.Optional[bytes] = getattr(self, "content", None)
is_streaming = getattr(self, "is_streaming", False)
auto_headers: typing.List[typing.Tuple[bytes, bytes]] = []
has_host = "host" in self.headers
has_user_agent = "user-agent" in self.headers
has_accept = "accept" in self.headers
has_content_length = (
"content-length" in self.headers or "transfer-encoding" in self.headers
)
has_accept_encoding = "accept-encoding" in self.headers
has_connection = "connection" in self.headers
if not has_host:
url = self.url
if url.userinfo:
url = url.copy_with(username=None, password=None)
auto_headers.append((b"host", url.authority.encode("ascii")))
if not has_user_agent:
auto_headers.append((b"user-agent", USER_AGENT.encode("ascii")))
if not has_accept:
auto_headers.append((b"accept", b"*/*"))
if not has_content_length:
if is_streaming:
auto_headers.append((b"transfer-encoding", b"chunked"))
elif content:
content_length = str(len(content)).encode()
auto_headers.append((b"content-length", content_length))
if not has_accept_encoding:
auto_headers.append((b"accept-encoding", ACCEPT_ENCODING.encode()))
if not has_connection:
auto_headers.append((b"connection", b"keep-alive"))
for item in reversed(auto_headers):
self.headers.raw.insert(0, item)
@property
def cookies(self) -> "Cookies":
if not hasattr(self, "_cookies"):
self._cookies = Cookies()
return self._cookies
def __repr__(self) -> str:
class_name = self.__class__.__name__
url = str(self.url)
return f"<{class_name}({self.method!r}, {url!r})>"
class AsyncRequest(BaseRequest):
def __init__(
self,
method: str,
url: typing.Union[str, URL],
*,
params: QueryParamTypes = None,
headers: HeaderTypes = None,
cookies: CookieTypes = None,
data: AsyncRequestData = None,
files: RequestFiles = None,
json: typing.Any = None,
):
super().__init__(
method=method, url=url, params=params, headers=headers, cookies=cookies
)
if data is None or isinstance(data, dict):
content, content_type = self.encode_data(data, files, json)
self.is_streaming = False
self.content = content
if content_type:
self.headers["Content-Type"] = content_type
elif isinstance(data, (str, bytes)):
data = data.encode("utf-8") if isinstance(data, str) else data
self.is_streaming = False
self.content = data
else:
assert hasattr(data, "__aiter__")
self.is_streaming = True
self.content_aiter = data
self.prepare()
async def read(self) -> bytes:
"""
Read and return the response content.
"""
if not hasattr(self, "content"):
self.content = b"".join([part async for part in self.stream()])
return self.content
async def stream(self) -> typing.AsyncIterator[bytes]:
if self.is_streaming:
async for part in self.content_aiter:
yield part
elif self.content:
yield self.content
class Request(BaseRequest):
def __init__(
self,
method: str,
url: typing.Union[str, URL],
*,
params: QueryParamTypes = None,
headers: HeaderTypes = None,
cookies: CookieTypes = None,
data: RequestData = None,
files: RequestFiles = None,
json: typing.Any = None,
):
super().__init__(
method=method, url=url, params=params, headers=headers, cookies=cookies
)
if data is None or isinstance(data, dict):
content, content_type = self.encode_data(data, files, json)
self.is_streaming = False
self.content = content
if content_type:
self.headers["Content-Type"] = content_type
elif isinstance(data, (str, bytes)):
data = data.encode("utf-8") if isinstance(data, str) else data
self.is_streaming = False
self.content = data
else:
assert hasattr(data, "__iter__")
self.is_streaming = True
self.content_iter = data
self.prepare()
def read(self) -> bytes:
if not hasattr(self, "content"):
self.content = b"".join([part for part in self.stream()])
return self.content
def stream(self) -> typing.Iterator[bytes]:
if self.is_streaming:
for part in self.content_iter:
yield part
elif self.content:
yield self.content
class BaseResponse:
def __init__(
self,
status_code: int,
*,
http_version: str = None,
headers: HeaderTypes = None,
request: BaseRequest = None,
on_close: typing.Callable = None,
elapsed: datetime.timedelta = None,
):
self.status_code = status_code
self.http_version = http_version
self.headers = Headers(headers)
self.request = request
self.on_close = on_close
self.elapsed = datetime.timedelta(0) if elapsed is None else elapsed
self.call_next: typing.Optional[typing.Callable] = None
@property
def reason_phrase(self) -> str:
return StatusCode.get_reason_phrase(self.status_code)
@property
def url(self) -> typing.Optional[URL]:
"""
Returns the URL for which the request was made.
Requires that `request` was provided when instantiating the response.
"""
return None if self.request is None else self.request.url
@property
def content(self) -> bytes:
if not hasattr(self, "_content"):
if hasattr(self, "_raw_content"):
raw_content = self._raw_content # type: ignore
content = self.decoder.decode(raw_content)
content += self.decoder.flush()
self._content = content
else:
raise ResponseNotRead()
return self._content
@property
def text(self) -> str:
if not hasattr(self, "_text"):
content = self.content
if not content:
self._text = ""
else:
encoding = self.encoding
self._text = content.decode(encoding, errors="replace")
return self._text
@property
def encoding(self) -> str:
if not hasattr(self, "_encoding"):
encoding = self.charset_encoding
if encoding is None or not is_known_encoding(encoding):
encoding = self.apparent_encoding
if encoding is None or not is_known_encoding(encoding):
encoding = "utf-8"
self._encoding = encoding
return self._encoding
@encoding.setter
def encoding(self, value: str) -> None:
self._encoding = value
@property
def charset_encoding(self) -> typing.Optional[str]:
"""
Return the encoding, as specified by the Content-Type header.
"""
content_type = self.headers.get("Content-Type")
if content_type is None:
return None
parsed = cgi.parse_header(content_type)
media_type, params = parsed[0], parsed[-1]
if "charset" in params:
return params["charset"].strip("'\"")
# RFC 2616 specifies that 'iso-8859-1' should be used as the default
# for 'text/*' media types, if no charset is provided.
# See: https://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.7.1
if media_type.startswith("text/"):
return "iso-8859-1"
return None
@property
def apparent_encoding(self) -> typing.Optional[str]:
"""
Return the encoding, as it appears to autodetection.
"""
return chardet.detect(self.content)["encoding"]
@property
def decoder(self) -> Decoder:
"""
Returns a decoder instance which can be used to decode the raw byte
content, depending on the Content-Encoding used in the response.
"""
if not hasattr(self, "_decoder"):
decoders: typing.List[Decoder] = []
values = self.headers.getlist("content-encoding", split_commas=True)
for value in values:
value = value.strip().lower()
try:
decoder_cls = SUPPORTED_DECODERS[value]
decoders.append(decoder_cls())
except KeyError:
continue
if len(decoders) == 1:
self._decoder = decoders[0]
elif len(decoders) > 1:
self._decoder = MultiDecoder(decoders)
else:
self._decoder = IdentityDecoder()
return self._decoder
@property
def is_redirect(self) -> bool:
return StatusCode.is_redirect(self.status_code) and "location" in self.headers
def raise_for_status(self) -> None:
"""
        Raise the `HTTPError` if one occurred.
"""
message = (
"{0.status_code} {error_type}: {0.reason_phrase} for url: {0.url}\n"
"For more information check: https://httpstatuses.com/{0.status_code}"
)
if StatusCode.is_client_error(self.status_code):
message = message.format(self, error_type="Client Error")
elif StatusCode.is_server_error(self.status_code):
message = message.format(self, error_type="Server Error")
else:
message = ""
if message:
raise HTTPError(message, response=self)
def json(self, **kwargs: typing.Any) -> typing.Union[dict, list]:
if self.charset_encoding is None and self.content and len(self.content) > 3:
encoding = guess_json_utf(self.content)
if encoding is not None:
try:
return jsonlib.loads(self.content.decode(encoding), **kwargs)
except UnicodeDecodeError:
pass
return jsonlib.loads(self.text, **kwargs)
@property
def cookies(self) -> "Cookies":
if not hasattr(self, "_cookies"):
assert self.request is not None
self._cookies = Cookies()
self._cookies.extract_cookies(self)
return self._cookies
@property
def links(self) -> typing.Dict[typing.Optional[str], typing.Dict[str, str]]:
"""
Returns the parsed header links of the response, if any
"""
header = self.headers.get("link")
ldict = {}
if header:
links = parse_header_links(header)
for link in links:
key = link.get("rel") or link.get("url")
ldict[key] = link
return ldict
def __repr__(self) -> str:
return f"<Response [{self.status_code} {self.reason_phrase}]>"
class AsyncResponse(BaseResponse):
def __init__(
self,
status_code: int,
*,
http_version: str = None,
headers: HeaderTypes = None,
content: AsyncResponseContent = None,
on_close: typing.Callable = None,
request: AsyncRequest = None,
history: typing.List["BaseResponse"] = None,
elapsed: datetime.timedelta = None,
):
super().__init__(
status_code=status_code,
http_version=http_version,
headers=headers,
request=request,
on_close=on_close,
elapsed=elapsed,
)
self.history = [] if history is None else list(history)
if content is None or isinstance(content, bytes):
self.is_closed = True
self.is_stream_consumed = True
self._raw_content = content or b""
else:
self.is_closed = False
self.is_stream_consumed = False
self._raw_stream = content
async def read(self) -> bytes:
"""
Read and return the response content.
"""
if not hasattr(self, "_content"):
self._content = b"".join([part async for part in self.stream()])
return self._content
async def stream(self) -> typing.AsyncIterator[bytes]:
"""
A byte-iterator over the decoded response content.
This allows us to handle gzip, deflate, and brotli encoded responses.
"""
if hasattr(self, "_content"):
yield self._content
else:
async for chunk in self.raw():
yield self.decoder.decode(chunk)
yield self.decoder.flush()
async def stream_text(self) -> typing.AsyncIterator[str]:
"""
A str-iterator over the decoded response content
        that handles gzip, deflate, etc., and also detects the content's
string encoding.
"""
decoder = TextDecoder(encoding=self.charset_encoding)
async for chunk in self.stream():
yield decoder.decode(chunk)
yield decoder.flush()
async def raw(self) -> typing.AsyncIterator[bytes]:
"""
A byte-iterator over the raw response content.
"""
if hasattr(self, "_raw_content"):
yield self._raw_content
else:
if self.is_stream_consumed:
raise StreamConsumed()
if self.is_closed:
raise ResponseClosed()
self.is_stream_consumed = True
async for part in self._raw_stream:
yield part
await self.close()
async def next(self) -> "AsyncResponse":
"""
Get the next response from a redirect response.
"""
if not self.is_redirect:
raise NotRedirectResponse()
assert self.call_next is not None
return await self.call_next()
async def close(self) -> None:
"""
Close the response and release the connection.
Automatically called if the response body is read to completion.
"""
if not self.is_closed:
self.is_closed = True
if self.on_close is not None:
await self.on_close()
class Response(BaseResponse):
def __init__(
self,
status_code: int,
*,
http_version: str = None,
headers: HeaderTypes = None,
content: ResponseContent = None,
on_close: typing.Callable = None,
request: Request = None,
history: typing.List["BaseResponse"] = None,
elapsed: datetime.timedelta = None,
):
super().__init__(
status_code=status_code,
http_version=http_version,
headers=headers,
request=request,
on_close=on_close,
elapsed=elapsed,
)
self.history = [] if history is None else list(history)
if content is None or isinstance(content, bytes):
self.is_closed = True
self.is_stream_consumed = True
self._raw_content = content or b""
else:
self.is_closed = False
self.is_stream_consumed = False
self._raw_stream = content
def read(self) -> bytes:
"""
Read and return the response content.
"""
if not hasattr(self, "_content"):
self._content = b"".join([part for part in self.stream()])
return self._content
def stream(self) -> typing.Iterator[bytes]:
"""
A byte-iterator over the decoded response content.
This allows us to handle gzip, deflate, and brotli encoded responses.
"""
if hasattr(self, "_content"):
yield self._content
else:
for chunk in self.raw():
yield self.decoder.decode(chunk)
yield self.decoder.flush()
def stream_text(self) -> typing.Iterator[str]:
"""
A str-iterator over the decoded response content
        that handles gzip, deflate, etc., and also detects the content's
string encoding.
"""
decoder = TextDecoder(encoding=self.charset_encoding)
for chunk in self.stream():
yield decoder.decode(chunk)
yield decoder.flush()
def raw(self) -> typing.Iterator[bytes]:
"""
A byte-iterator over the raw response content.
"""
if hasattr(self, "_raw_content"):
yield self._raw_content
else:
if self.is_stream_consumed:
raise StreamConsumed()
if self.is_closed:
raise ResponseClosed()
self.is_stream_consumed = True
for part in self._raw_stream:
yield part
self.close()
def close(self) -> None:
"""
Close the response and release the connection.
Automatically called if the response body is read to completion.
"""
if not self.is_closed:
self.is_closed = True
if self.on_close is not None:
self.on_close()
class Cookies(MutableMapping):
"""
HTTP Cookies, as a mutable mapping.
"""
def __init__(self, cookies: CookieTypes = None) -> None:
if cookies is None or isinstance(cookies, dict):
self.jar = CookieJar()
if isinstance(cookies, dict):
for key, value in cookies.items():
self.set(key, value)
elif isinstance(cookies, Cookies):
self.jar = CookieJar()
for cookie in cookies.jar:
self.jar.set_cookie(cookie)
else:
self.jar = cookies
def extract_cookies(self, response: BaseResponse) -> None:
"""
Loads any cookies based on the response `Set-Cookie` headers.
"""
assert response.request is not None
        urllib_response = self._CookieCompatResponse(response)
        urllib_request = self._CookieCompatRequest(response.request)
        self.jar.extract_cookies(urllib_response, urllib_request)  # type: ignore
def set_cookie_header(self, request: BaseRequest) -> None:
"""
Sets an appropriate 'Cookie:' HTTP header on the `Request`.
"""
urllib_request = self._CookieCompatRequest(request)
self.jar.add_cookie_header(urllib_request)
def set(self, name: str, value: str, domain: str = "", path: str = "/") -> None:
"""
Set a cookie value by name. May optionally include domain and path.
"""
kwargs = {
"version": 0,
"name": name,
"value": value,
"port": None,
"port_specified": False,
"domain": domain,
"domain_specified": bool(domain),
"domain_initial_dot": domain.startswith("."),
"path": path,
"path_specified": bool(path),
"secure": False,
"expires": None,
"discard": True,
"comment": None,
"comment_url": None,
"rest": {"HttpOnly": None},
"rfc2109": False,
}
cookie = Cookie(**kwargs) # type: ignore
self.jar.set_cookie(cookie)
def get( # type: ignore
self, name: str, default: str = None, domain: str = None, path: str = None
) -> typing.Optional[str]:
"""
Get a cookie by name. May optionally include domain and path
in order to specify exactly which cookie to retrieve.
"""
value = None
for cookie in self.jar:
if cookie.name == name:
if domain is None or cookie.domain == domain: # type: ignore
if path is None or cookie.path == path:
if value is not None:
message = f"Multiple cookies exist with name={name}"
raise CookieConflict(message)
value = cookie.value
if value is None:
return default
return value
def delete(self, name: str, domain: str = None, path: str = None) -> None:
"""
Delete a cookie by name. May optionally include domain and path
in order to specify exactly which cookie to delete.
"""
if domain is not None and path is not None:
return self.jar.clear(domain, path, name)
remove = []
for cookie in self.jar:
if cookie.name == name:
if domain is None or cookie.domain == domain: # type: ignore
if path is None or cookie.path == path:
remove.append(cookie)
for cookie in remove:
self.jar.clear(cookie.domain, cookie.path, cookie.name) # type: ignore
def clear(self, domain: str = None, path: str = None) -> None:
"""
Delete all cookies. Optionally include a domain and path in
order to only delete a subset of all the cookies.
"""
args = []
if domain is not None:
args.append(domain)
if path is not None:
assert domain is not None
args.append(path)
self.jar.clear(*args)
def update(self, cookies: CookieTypes = None) -> None: # type: ignore
cookies = Cookies(cookies)
for cookie in cookies.jar:
self.jar.set_cookie(cookie)
def __setitem__(self, name: str, value: str) -> None:
return self.set(name, value)
def __getitem__(self, name: str) -> str:
value = self.get(name)
if value is None:
raise KeyError(name)
return value
def __delitem__(self, name: str) -> None:
return self.delete(name)
def __len__(self) -> int:
return len(self.jar)
def __iter__(self) -> typing.Iterator[str]:
return (cookie.name for cookie in self.jar)
def __bool__(self) -> bool:
for _ in self.jar:
return True
return False
class _CookieCompatRequest(urllib.request.Request):
"""
Wraps a `Request` instance up in a compatibility interface suitable
for use with `CookieJar` operations.
"""
def __init__(self, request: BaseRequest) -> None:
super().__init__(
url=str(request.url),
headers=dict(request.headers),
method=request.method,
)
self.request = request
def add_unredirected_header(self, key: str, value: str) -> None:
super().add_unredirected_header(key, value)
self.request.headers[key] = value
class _CookieCompatResponse:
"""
        Wraps a `Response` instance up in a compatibility interface suitable
for use with `CookieJar` operations.
"""
def __init__(self, response: BaseResponse):
self.response = response
def info(self) -> email.message.Message:
info = email.message.Message()
for key, value in self.response.headers.items():
info[key] = value
return info
| return self._uri_reference.userinfo or "" |
index.ts | import { Agile } from '../agile';
import { shared } from '../shared';
import { Collection, CreateCollectionConfig, DefaultItem } from './collection';
export * from './collection';
export * from './collection.persistent';
export * from './group';
export * from './group/group.observer';
export * from './item';
export * from './selector';
/**
* Returns a newly created Collection.
*
* A Collection manages a reactive set of Information
 * that we need to remember globally at a later point in time,
 * while providing a toolkit to use and mutate this set of Information.
*
* It is designed for arrays of data objects following the same pattern.
*
 * Each of these data objects must have a unique `primaryKey` to be correctly identified later.
*
* You can create as many global Collections as you need.
*
* [Learn more..](https://agile-ts.org/docs/core/agile-instance/methods#createcollection)
*
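 * @example
 * // A minimal sketch; the `TodoData` shape and the config key are illustrative.
 * const TODOS = createCollection<TodoData>({ key: 'todos' });
 * TODOS.collect({ id: 1, name: 'Buy milk' });
 *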
* @public
* @param config - Configuration object
* @param agileInstance - Instance of Agile the Collection belongs to.
*/
export function | <DataType extends DefaultItem = DefaultItem>(
config?: CreateCollectionConfig<DataType>,
agileInstance: Agile = shared
): Collection<DataType> {
return new Collection<DataType>(agileInstance, config);
}
| createCollection |
WebUserList.js | import React, { PureComponent, Fragment } from 'react';
import Link from 'umi/link';
import { connect } from 'dva';
import { Card, Form, Input, Button, Row, Col, Divider, Modal, Tooltip } from 'antd';
import StandardTable from '@/components/StandardTable';
import StandardQueryList from '@/components/StandardQueryList';
import PageHeaderWrapper from '@/components/PageHeaderWrapper';
import { formatFormValues, serializeSearchParam } from '@/utils/search';
const FormItem = Form.Item;
@connect(({ webUser, loading }) => ({
webUser,
loading: loading.models.webUser,
}))
@Form.create()
class WebUserList extends PureComponent {
state = {
selectedRows: [],
formValues: {},
};
columns = [
{
title: 'ID',
dataIndex: 'id',
},
{
      title: 'Name',
dataIndex: 'name',
},
{
      title: 'Mobile',
dataIndex: 'mobile',
},
{
      title: 'Role',
dataIndex: 'roleDesc',
},
{
      title: 'Created At',
dataIndex: 'createdAt',
},
{
      title: 'Actions',
render: (text, row) => (
<Fragment>
          <Link to={`/webUser/${row.id}`}>Edit</Link>
<Divider type="vertical" />
          <a onClick={() => this.handleDeleteUser(row)}>Delete</a>
</Fragment>
),
},
];
componentDidMount() {
const { dispatch } = this.props;
dispatch({
type: 'webUser/search',
});
}
handleDeleteUser(row) {
const { dispatch } = this.props;
Modal.confirm({
      title: 'Delete',
      content: `Delete user "${row.name}"?`,
      okText: 'OK',
      cancelText: 'Cancel',
onOk: () => {
dispatch({
type: 'webUser/delete',
payload: { id: row.id },
});
},
});
}
handleSelectRows(rows) {
this.setState({
selectedRows: rows || [],
});
}
handleStandardTableChange(pagination, filters, sorter) {
const { dispatch } = this.props;
const { formValues } = this.state;
const params = serializeSearchParam(pagination, formValues, filters, sorter);
dispatch({
type: 'webUser/search',
payload: params,
});
}
handleFormReset() {
const { form, dispatch } = this.props;
form.resetFields();
this.setState({
formValues: {},
});
dispatch({
type: 'webUser/search',
payload: {},
});
}
handleSearch() {
const { dispatch, form } = this.props;
form.validateFields((err, fieldsValue) => {
if (err) return;
const values = formatFormValues(fieldsValue);
this.setState({
formValues: values,
});
dispatch({
type: 'webUser/search',
payload: {
...values,
},
});
});
}
renderQueryForm() {
const {
form: { getFieldDecorator },
} = this.props;
return (
<Form layout="inline">
<Row gutter={{ md: 8, lg: 24, xl: 48 }}>
<Col md={8} sm={24}>
<FormItem label="手机号">
{getFieldDecorator('mobile')(<Input placeholder="精确查询" />)}
</FormItem>
</Col>
<Col md={8} sm={24}>
<FormItem label="姓名">
{getFieldDecorator('name')(<Input placeholder="支持模糊查询" />)}
</FormItem>
</Col>
</Row>
</Form> | const {
webUser: { webUserList },
loading,
} = this.props;
const { selectedRows } = this.state;
return (
<PageHeaderWrapper>
<Card bordered={false}>
<StandardQueryList
form={this.renderQueryForm()}
leftOperators={
<Fragment>
<Link to="/webUser/new">
<Button icon="plus" type="primary">
                    New
</Button>
</Link>
{selectedRows.length > 0 && (
<span>
                  <Tooltip placement="topLeft" title="Not supported yet">
                    <Button>Batch Delete</Button>
</Tooltip>
</span>
)}
</Fragment>
}
rightOperators={
<Fragment>
<Button icon="close" onClick={() => this.handleFormReset()}>
                  Reset
</Button>
<Button icon="search" type="primary" onClick={() => this.handleSearch()}>
查询
</Button>
</Fragment>
}
table={
<StandardTable
selectedRows={selectedRows}
loading={loading}
data={webUserList}
rowKey="id"
columns={this.columns}
onSelectRow={rows => this.handleSelectRows(rows)}
onChange={(...args) => this.handleStandardTableChange(...args)}
/>
}
/>
</Card>
</PageHeaderWrapper>
);
}
}
export default WebUserList; | );
}
render() { |
json.rs | use serde::Serialize;
use serde_json::to_writer;
use std::sync::mpsc::{channel, Receiver, Sender};
use std::thread::{spawn, JoinHandle};
use std::io::{stdout, BufWriter, Write};
use std::path::{Path, PathBuf}; | /// Output data as [JSON lines] (a.k.a. [Newline-delimited JSON]) like the following:
///
/// ```ignore
/// {"a": 1, "b": [1, 2, 3]}
/// {"a": 2, "b": [2, 2, 3]}
/// ```
///
/// [JSON lines]: http://jsonlines.org/
/// [Newline-delimited JSON]: http://ndjson.org/
#[derive(Debug, Clone)]
pub struct JsonSink {
filename: PathBuf,
}
impl JsonSink {
pub fn from_path(path: &Path) -> Self {
Self {
filename: PathBuf::from(path),
}
}
pub fn from_str(path: &str) -> Self {
Self {
filename: From::from(path.to_string()),
}
}
}
impl<Doc: 'static + Send + Serialize> Sink<Doc> for JsonSink {
fn run(self) -> (Sender<Doc>, JoinHandle<()>) {
let (s, r) = channel::<Doc>();
let th = spawn(move || {
let buf = BufWriter::new(File::create(self.filename).ok().unwrap());
output_json(buf, r)
});
(s, th)
}
}
/// Output data as JSON into stdout
#[derive(Debug, Clone, new)]
pub struct StdoutSink {}
impl<Doc: 'static + Send + Serialize> Sink<Doc> for StdoutSink {
fn run(self) -> (Sender<Doc>, JoinHandle<()>) {
let (s, r) = channel::<Doc>();
let th = spawn(move || {
let out = stdout();
let buf = BufWriter::new(out.lock());
output_json(buf, r)
});
(s, th)
}
}
fn output_json<Buf, Doc>(mut buf: Buf, r: Receiver<Doc>)
where
Buf: Write,
Doc: Serialize,
{
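// Drain the channel until every `Sender` has been dropped; `recv()` then
// returns `Err` and the writer thread exits.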
loop {
match r.recv() {
Ok(doc) => {
to_writer(&mut buf, &doc).unwrap();
writeln!(buf).unwrap();
}
Err(_) => return,
}
}
} | use std::fs::File;
use super::Sink;
|
conftest.py | import pytest
def pytest_addoption(parser):
parser.addoption(
"--master", action="store", default="", help="IP address of GKE master")
parser.addoption(
"--namespace", action="store", default="", help="namespace of server")
parser.addoption(
"--service", action="store", default="",
help="The name of the mnist K8s service")
@pytest.fixture
def master(request):
return request.config.getoption("--master")
@pytest.fixture
def namespace(request):
|
@pytest.fixture
def service(request):
return request.config.getoption("--service")
| return request.config.getoption("--namespace") |
Struct.ts | import Variable from "./Variable.js";
import { formatLowerFirst } from "../utils.js";
class Struct {
constructor(
public type: string,
public members: (Variable | Struct)[],
public name?: string,
public visibility?: GPUShaderStageFlags,
public arrayCount?: number
) {
this.name = name || formatLowerFirst(type);
}
public getGLSLString(): string { | .map((variable) => ` ${variable.type} ${variable.name};`)
.join("\n")}
};
`;
}
}
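// Example (hypothetical member shapes, since Variable is defined elsewhere):
// a Struct of type "Light" with a single float member "intensity" would
// serialize roughly to:
//   struct Light {
//     float intensity;
//   };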
export default Struct; | return /* glsl */ `struct ${this.type} {
${this.members |
etcd_process.go | // Copyright 2017 The etcd Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package e2e
import (
"fmt"
"net/url"
"os"
"go.etcd.io/etcd/client/pkg/v3/fileutil"
"go.etcd.io/etcd/pkg/v3/expect"
"go.uber.org/zap"
)
var (
etcdServerReadyLines = []string{"ready to serve client requests"}
binPath string
ctlBinPath string
utlBinPath string
)
// etcdProcess is a process that serves etcd requests.
type etcdProcess interface {
EndpointsV2() []string
EndpointsV3() []string
EndpointsMetrics() []string
Start() error
Restart() error
Stop() error
Close() error
WithStopSignal(sig os.Signal) os.Signal
Config() *etcdServerProcessConfig
}
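// A minimal usage sketch (hypothetical config values; error handling elided):
//
//   cfg := &etcdServerProcessConfig{lg: zap.NewExample(), execPath: binPath, name: "s1"}
//   ep, _ := newEtcdServerProcess(cfg)
//   _ = ep.Start()
//   defer ep.Close()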
type etcdServerProcess struct {
cfg *etcdServerProcessConfig
proc *expect.ExpectProcess
donec chan struct{} // closed when Interact() terminates
}
type etcdServerProcessConfig struct {
lg *zap.Logger
execPath string
args []string
tlsArgs []string
dataDirPath string
keepDataDir bool
name string
purl url.URL
acurl string
murl string
initialToken string
initialCluster string
}
func newEtcdServerProcess(cfg *etcdServerProcessConfig) (*etcdServerProcess, error) {
if !fileutil.Exist(cfg.execPath) {
return nil, fmt.Errorf("could not find etcd binary: %s", cfg.execPath)
}
if !cfg.keepDataDir {
if err := os.RemoveAll(cfg.dataDirPath); err != nil {
return nil, err
}
}
return &etcdServerProcess{cfg: cfg, donec: make(chan struct{})}, nil
}
func (ep *etcdServerProcess) EndpointsV2() []string { return []string{ep.cfg.acurl} }
func (ep *etcdServerProcess) EndpointsV3() []string { return ep.EndpointsV2() }
func (ep *etcdServerProcess) EndpointsMetrics() []string { return []string{ep.cfg.murl} }
func (ep *etcdServerProcess) Start() error {
if ep.proc != nil {
panic("already started")
}
ep.cfg.lg.Info("starting server...", zap.String("name", ep.cfg.name)) | return err
}
ep.proc = proc
err = ep.waitReady()
if err == nil {
ep.cfg.lg.Info("started server.", zap.String("name", ep.cfg.name))
}
return err
}
func (ep *etcdServerProcess) Restart() error {
ep.cfg.lg.Info("restaring server...", zap.String("name", ep.cfg.name))
if err := ep.Stop(); err != nil {
return err
}
ep.donec = make(chan struct{})
err := ep.Start()
if err == nil {
ep.cfg.lg.Info("restared server", zap.String("name", ep.cfg.name))
}
return err
}
func (ep *etcdServerProcess) Stop() (err error) {
ep.cfg.lg.Info("stoping server...", zap.String("name", ep.cfg.name))
if ep == nil || ep.proc == nil {
return nil
}
err = ep.proc.Stop()
if err != nil {
return err
}
ep.proc = nil
<-ep.donec
ep.donec = make(chan struct{})
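// For unix/unixs peer URLs, remove the leftover socket file so a
// subsequent Start can bind to the same address.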
if ep.cfg.purl.Scheme == "unix" || ep.cfg.purl.Scheme == "unixs" {
err = os.Remove(ep.cfg.purl.Host + ep.cfg.purl.Path)
if err != nil && !os.IsNotExist(err) {
return err
}
}
ep.cfg.lg.Info("stopped server.", zap.String("name", ep.cfg.name))
return nil
}
func (ep *etcdServerProcess) Close() error {
ep.cfg.lg.Info("closing server...", zap.String("name", ep.cfg.name))
if err := ep.Stop(); err != nil {
return err
}
if !ep.cfg.keepDataDir {
ep.cfg.lg.Info("removing directory", zap.String("data-dir", ep.cfg.dataDirPath))
return os.RemoveAll(ep.cfg.dataDirPath)
}
return nil
}
func (ep *etcdServerProcess) WithStopSignal(sig os.Signal) os.Signal {
ret := ep.proc.StopSignal
ep.proc.StopSignal = sig
return ret
}
func (ep *etcdServerProcess) waitReady() error {
defer close(ep.donec)
return waitReadyExpectProc(ep.proc, etcdServerReadyLines)
}
func (ep *etcdServerProcess) Config() *etcdServerProcessConfig { return ep.cfg } | proc, err := spawnCmdWithLogger(ep.cfg.lg, append([]string{ep.cfg.execPath}, ep.cfg.args...))
if err != nil { |
widget_gallery.rs | // Copyright 2020 The xi-editor Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use druid::{
im,
kurbo::{Affine, BezPath, Circle, Point},
piet::{FixedLinearGradient, GradientStop, InterpolationMode},
widget::{
prelude::*, Button, Checkbox, FillStrat, Flex, Image, Label, List, Painter, ProgressBar,
RadioGroup, Scroll, Slider, Spinner, Stepper, Switch, TextBox,
},
AppLauncher, Color, Data, ImageBuf, Lens, Widget, WidgetExt, WidgetPod, WindowDesc,
};
#[cfg(feature = "svg")]
use druid::widget::{Svg, SvgData};
const XI_IMAGE: &[u8] = include_bytes!("assets/xi.image");
#[derive(Clone, Data, Lens)]
struct AppData {
label_data: String,
checkbox_data: bool,
clicked_count: u64,
list_items: im::Vector<String>,
progressbar: f64,
radio: MyRadio,
stepper: f64,
editable_text: String,
}
#[derive(Clone, Data, PartialEq)]
enum MyRadio {
GaGa,
GuGu,
BaaBaa,
}
pub fn | () {
let main_window = WindowDesc::new(ui_builder()).title("Widget Gallery");
// Set our initial data
let data = AppData {
label_data: "test".into(),
checkbox_data: false,
clicked_count: 0,
list_items: im::vector!["1".into(), "2".into(), "3".into()],
progressbar: 0.5,
radio: MyRadio::GaGa,
stepper: 0.0,
editable_text: "edit me!".into(),
};
AppLauncher::with_window(main_window)
.log_to_console()
.launch(data)
.expect("launch failed");
}
fn ui_builder() -> impl Widget<AppData> {
#[cfg(feature = "svg")]
let svg_example = label_widget(
Svg::new(
include_str!("./assets/tiger.svg")
.parse::<SvgData>()
.unwrap(),
),
"Svg",
);
#[cfg(not(feature = "svg"))]
let svg_example = label_widget(Label::new("SVG not supported (yet)").center(), "Svg");
Scroll::new(
SquaresGrid::new()
.with_cell_size(Size::new(200.0, 240.0))
.with_spacing(20.0)
.with_child(label_widget(
Label::new(|data: &AppData, _: &_| data.label_data.clone()),
"Label",
))
.with_child(label_widget(
Flex::column()
.with_child(
Button::new("Click me!")
.on_click(|_, data: &mut AppData, _: &_| data.clicked_count += 1),
)
.with_spacer(4.0)
.with_child(Label::new(|data: &AppData, _: &_| {
format!("Clicked {} times!", data.clicked_count)
})),
"Button",
))
.with_child(label_widget(
Checkbox::new("Check me!").lens(AppData::checkbox_data),
"Checkbox",
))
.with_child(label_widget(
List::new(|| {
Label::new(|data: &String, _: &_| format!("List item: {}", data))
.center()
.background(Color::hlc(230.0, 50.0, 50.0))
.fix_height(40.0)
.expand_width()
})
.lens(AppData::list_items),
"List",
))
.with_child(label_widget(
Flex::column()
.with_child(ProgressBar::new().lens(AppData::progressbar))
.with_spacer(4.0)
.with_child(Label::new(|data: &AppData, _: &_| {
format!("{:.1}%", data.progressbar * 100.0)
}))
.with_spacer(4.0)
.with_child(
Flex::row()
.with_child(Button::new("<<").on_click(|_, data: &mut AppData, _| {
data.progressbar = (data.progressbar - 0.05).max(0.0);
}))
.with_spacer(4.0)
.with_child(Button::new(">>").on_click(|_, data: &mut AppData, _| {
data.progressbar = (data.progressbar + 0.05).min(1.0);
})),
),
"ProgressBar",
))
// The image example here uses hard-coded literal image data included in the binary.
// You may also want to load an image at runtime using a crate like `image`.
.with_child(label_widget(
Painter::new(paint_example).fix_size(32.0, 32.0),
"Painter",
))
.with_child(label_widget(
RadioGroup::new(vec![
("radio gaga", MyRadio::GaGa),
("radio gugu", MyRadio::GuGu),
("radio baabaa", MyRadio::BaaBaa),
])
.lens(AppData::radio),
"RadioGroup",
))
.with_child(label_widget(
Flex::column()
.with_child(
Slider::new()
.with_range(0.05, 0.95)
.with_step(0.10)
.lens(AppData::progressbar),
)
.with_spacer(4.0)
.with_child(Label::new(|data: &AppData, _: &_| {
format!("{:3.2}%", data.progressbar * 100.)
})),
"Slider",
))
.with_child(label_widget(
Flex::row()
.with_child(Stepper::new().lens(AppData::stepper))
.with_spacer(4.0)
.with_child(Label::new(|data: &AppData, _: &_| {
format!("{:.1}", data.stepper)
})),
"Stepper",
))
.with_child(label_widget(
TextBox::new().lens(AppData::editable_text),
"TextBox",
))
.with_child(label_widget(
Switch::new().lens(AppData::checkbox_data),
"Switch",
))
.with_child(label_widget(
Spinner::new().fix_height(40.0).center(),
"Spinner",
))
.with_child(label_widget(
Image::new(
ImageBuf::from_data(include_bytes!("./assets/PicWithAlpha.png")).unwrap(),
)
.fill_mode(FillStrat::Fill)
.interpolation_mode(InterpolationMode::Bilinear),
"Image",
))
.with_child(svg_example),
)
.vertical()
}
fn label_widget<T: Data>(widget: impl Widget<T> + 'static, label: &str) -> impl Widget<T> {
Flex::column()
.must_fill_main_axis(true)
.with_flex_child(widget.center(), 1.0)
.with_child(
Painter::new(|ctx, _: &_, _: &_| {
let size = ctx.size().to_rect();
ctx.fill(size, &Color::WHITE)
})
.fix_height(1.0),
)
.with_child(Label::new(label).center().fix_height(40.0))
.border(Color::WHITE, 1.0)
}
fn load_xi_image<Ctx: druid::RenderContext>(ctx: &mut Ctx) -> Ctx::Image {
ctx.make_image(32, 32, XI_IMAGE, druid::piet::ImageFormat::Rgb)
.unwrap()
}
fn paint_example<T>(ctx: &mut PaintCtx, _: &T, _env: &Env) {
let bounds = ctx.size().to_rect();
let img = load_xi_image(ctx.render_ctx);
ctx.draw_image(
&img,
bounds,
druid::piet::InterpolationMode::NearestNeighbor,
);
ctx.with_save(|ctx| {
ctx.transform(Affine::scale_non_uniform(bounds.width(), bounds.height()));
// Draw the dot of the `i` on top of the image data.
let i_dot = Circle::new((0.775, 0.18), 0.05);
let i_dot_brush = ctx.solid_brush(Color::WHITE);
ctx.fill(i_dot, &i_dot_brush);
// Cross out Xi because it's going dormant :'(
let mut spare = BezPath::new();
spare.move_to((0.1, 0.1));
spare.line_to((0.2, 0.1));
spare.line_to((0.9, 0.9));
spare.line_to((0.8, 0.9));
spare.close_path();
let spare_brush = ctx
.gradient(FixedLinearGradient {
start: (0.0, 0.0).into(),
end: (1.0, 1.0).into(),
stops: vec![
GradientStop {
pos: 0.0,
color: Color::rgb(1.0, 0.0, 0.0),
},
GradientStop {
pos: 1.0,
color: Color::rgb(0.4, 0.0, 0.0),
},
],
})
.unwrap();
ctx.fill(spare, &spare_brush);
});
}
// Grid widget
const DEFAULT_GRID_CELL_SIZE: Size = Size::new(100.0, 100.0);
const DEFAULT_GRID_SPACING: f64 = 10.0;
pub struct SquaresGrid<T> {
widgets: Vec<WidgetPod<T, Box<dyn Widget<T>>>>,
/// The number of widgets we can fit in the grid given the grid size.
drawable_widgets: usize,
cell_size: Size,
spacing: f64,
}
impl<T> SquaresGrid<T> {
pub fn new() -> Self {
SquaresGrid {
widgets: vec![],
drawable_widgets: 0,
cell_size: DEFAULT_GRID_CELL_SIZE,
spacing: DEFAULT_GRID_SPACING,
}
}
pub fn with_spacing(mut self, spacing: f64) -> Self {
self.spacing = spacing;
self
}
pub fn with_cell_size(mut self, cell_size: Size) -> Self {
self.cell_size = cell_size;
self
}
pub fn with_child(mut self, widget: impl Widget<T> + 'static) -> Self {
self.widgets.push(WidgetPod::new(Box::new(widget)));
self
}
}
impl<T> Default for SquaresGrid<T> {
fn default() -> Self {
Self::new()
}
}
impl<T: Data> Widget<T> for SquaresGrid<T> {
fn event(&mut self, ctx: &mut EventCtx, event: &Event, data: &mut T, env: &Env) {
for widget in self.widgets.iter_mut() {
widget.event(ctx, event, data, env);
}
}
fn lifecycle(&mut self, ctx: &mut LifeCycleCtx, event: &LifeCycle, data: &T, env: &Env) {
for widget in self.widgets.iter_mut() {
widget.lifecycle(ctx, event, data, env);
}
}
fn update(&mut self, ctx: &mut UpdateCtx, _old_data: &T, data: &T, env: &Env) {
for widget in self.widgets.iter_mut() {
widget.update(ctx, data, env);
}
}
fn layout(&mut self, ctx: &mut LayoutCtx, bc: &BoxConstraints, data: &T, env: &Env) -> Size {
let count = self.widgets.len() as f64;
// The space needed to lay all elements out on a single line.
let ideal_width = (self.cell_size.width + self.spacing + 1.0) * count;
// Constrain the width.
let width = ideal_width.min(bc.max().width).max(bc.min().width);
// Given the width, the space needed to lay out all elements (as many as possible on each
// line).
let cells_in_row =
((width - self.spacing) / (self.cell_size.width + self.spacing)).floor() as usize;
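// Worked example (hypothetical numbers): width = 680.0, cell width = 200.0,
// spacing = 20.0 gives floor((680 - 20) / 220) = 3 cells per row.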
let (height, rows) = if cells_in_row > 0 {
let mut rows = (count / cells_in_row as f64).ceil() as usize;
let height_from_rows =
|rows: usize| (rows as f64) * (self.cell_size.height + self.spacing) + self.spacing;
let ideal_height = height_from_rows(rows);
// Constrain the height
let height = ideal_height.max(bc.min().height).min(bc.max().height);
// Now calculate how many rows we can actually fit in
while height_from_rows(rows) > height && rows > 0 {
rows -= 1;
}
(height, rows)
} else {
(bc.min().height, 0)
};
// Cap the number of drawn widgets at the number there is space to draw.
self.drawable_widgets = self.widgets.len().min(rows * cells_in_row);
// Now we have the width and height, we can lay out the children.
let mut x_position = self.spacing;
let mut y_position = self.spacing;
for (idx, widget) in self
.widgets
.iter_mut()
.take(self.drawable_widgets)
.enumerate()
{
widget.layout(
ctx,
&BoxConstraints::new(self.cell_size, self.cell_size),
data,
env,
);
widget.set_origin(ctx, data, env, Point::new(x_position, y_position));
// Increment position for the next cell
x_position += self.cell_size.width + self.spacing;
// If we can't fit in another cell in this row ...
if (idx + 1) % cells_in_row == 0 {
// ... then start a new row.
x_position = self.spacing;
y_position += self.cell_size.height + self.spacing;
}
}
Size { width, height }
}
fn paint(&mut self, ctx: &mut PaintCtx, data: &T, env: &Env) {
for widget in self.widgets.iter_mut().take(self.drawable_widgets) {
widget.paint(ctx, data, env);
}
}
}
| main |
mod.rs | //! Module processing functionality.
mod index;
mod interpolator;
mod layouter;
mod namer;
mod terminator;
mod typifier;
pub use index::IndexableLength;
pub use layouter::{Alignment, InvalidBaseType, Layouter, TypeLayout};
pub use namer::{EntryPointIndex, NameKey, Namer};
pub use terminator::ensure_block_returns;
pub use typifier::{ResolveContext, ResolveError, TypeResolution};
#[derive(Clone, Debug, thiserror::Error, PartialEq)]
pub enum ProcError {
#[error("type is not indexable, and has no length (validation error)")]
TypeNotIndexable,
#[error("array length is wrong kind of constant (validation error)")]
InvalidArraySizeConstant(crate::Handle<crate::Constant>),
}
impl From<super::StorageFormat> for super::ScalarKind {
fn from(format: super::StorageFormat) -> Self {
use super::{ScalarKind as Sk, StorageFormat as Sf};
match format {
Sf::R8Unorm => Sk::Float,
Sf::R8Snorm => Sk::Float,
Sf::R8Uint => Sk::Uint,
Sf::R8Sint => Sk::Sint,
Sf::R16Uint => Sk::Uint,
Sf::R16Sint => Sk::Sint,
Sf::R16Float => Sk::Float,
Sf::Rg8Unorm => Sk::Float,
Sf::Rg8Snorm => Sk::Float,
Sf::Rg8Uint => Sk::Uint,
Sf::Rg8Sint => Sk::Sint,
Sf::R32Uint => Sk::Uint,
Sf::R32Sint => Sk::Sint,
Sf::R32Float => Sk::Float,
Sf::Rg16Uint => Sk::Uint,
Sf::Rg16Sint => Sk::Sint,
Sf::Rg16Float => Sk::Float,
Sf::Rgba8Unorm => Sk::Float,
Sf::Rgba8Snorm => Sk::Float,
Sf::Rgba8Uint => Sk::Uint,
Sf::Rgba8Sint => Sk::Sint,
Sf::Rgb10a2Unorm => Sk::Float,
Sf::Rg11b10Float => Sk::Float,
Sf::Rg32Uint => Sk::Uint,
Sf::Rg32Sint => Sk::Sint,
Sf::Rg32Float => Sk::Float,
Sf::Rgba16Uint => Sk::Uint,
Sf::Rgba16Sint => Sk::Sint,
Sf::Rgba16Float => Sk::Float,
Sf::Rgba32Uint => Sk::Uint,
Sf::Rgba32Sint => Sk::Sint,
Sf::Rgba32Float => Sk::Float,
}
}
}
impl super::ScalarValue {
pub fn scalar_kind(&self) -> super::ScalarKind {
match *self {
Self::Uint(_) => super::ScalarKind::Uint,
Self::Sint(_) => super::ScalarKind::Sint,
Self::Float(_) => super::ScalarKind::Float,
Self::Bool(_) => super::ScalarKind::Bool,
}
}
}
pub const POINTER_SPAN: u32 = 4;
impl super::TypeInner {
pub fn scalar_kind(&self) -> Option<super::ScalarKind> |
pub fn pointer_class(&self) -> Option<crate::StorageClass> {
match *self {
Self::Pointer { class, .. } => Some(class),
Self::ValuePointer { class, .. } => Some(class),
_ => None,
}
}
pub fn span(&self, constants: &super::Arena<super::Constant>) -> u32 {
match *self {
Self::Scalar { kind: _, width } | Self::Atomic { kind: _, width } => width as u32,
Self::Vector {
size,
kind: _,
width,
} => (size as u8 * width) as u32,
// matrices are treated as arrays of aligned columns
Self::Matrix {
columns,
rows,
width,
} => {
let aligned_rows = if rows > crate::VectorSize::Bi { 4 } else { 2 };
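// e.g. a 3x3 matrix of f32 (width 4): 3 columns * 4 aligned rows * 4
// bytes = 48, as checked by `test_matrix_size` below.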
columns as u32 * aligned_rows * width as u32
}
Self::Pointer { .. } | Self::ValuePointer { .. } => POINTER_SPAN,
Self::Array {
base: _,
size,
stride,
} => {
let count = match size {
super::ArraySize::Constant(handle) => {
// Bad array lengths will be caught during validation.
constants[handle].to_array_length().unwrap_or(1)
}
// A dynamically-sized array has to have at least one element
super::ArraySize::Dynamic => 1,
};
count * stride
}
Self::Struct { span, .. } => span,
Self::Image { .. } | Self::Sampler { .. } => 0,
}
}
}
impl super::MathFunction {
pub fn argument_count(&self) -> usize {
match *self {
// comparison
Self::Abs => 1,
Self::Min => 2,
Self::Max => 2,
Self::Clamp => 3,
// trigonometry
Self::Cos => 1,
Self::Cosh => 1,
Self::Sin => 1,
Self::Sinh => 1,
Self::Tan => 1,
Self::Tanh => 1,
Self::Acos => 1,
Self::Asin => 1,
Self::Atan => 1,
Self::Atan2 => 2,
// decomposition
Self::Ceil => 1,
Self::Floor => 1,
Self::Round => 1,
Self::Fract => 1,
Self::Trunc => 1,
Self::Modf => 2,
Self::Frexp => 2,
Self::Ldexp => 2,
// exponent
Self::Exp => 1,
Self::Exp2 => 1,
Self::Log => 1,
Self::Log2 => 1,
Self::Pow => 2,
// geometry
Self::Dot => 2,
Self::Outer => 2,
Self::Cross => 2,
Self::Distance => 2,
Self::Length => 1,
Self::Normalize => 1,
Self::FaceForward => 3,
Self::Reflect => 2,
Self::Refract => 3,
// computational
Self::Sign => 1,
Self::Fma => 3,
Self::Mix => 3,
Self::Step => 2,
Self::SmoothStep => 3,
Self::Sqrt => 1,
Self::InverseSqrt => 1,
Self::Inverse => 1,
Self::Transpose => 1,
Self::Determinant => 1,
// bits
Self::CountOneBits => 1,
Self::ReverseBits => 1,
}
}
}
impl crate::Expression {
/// Returns true if the expression is considered emitted at the start of a function.
pub fn needs_pre_emit(&self) -> bool {
match *self {
Self::Constant(_)
| Self::FunctionArgument(_)
| Self::GlobalVariable(_)
| Self::LocalVariable(_) => true,
_ => false,
}
}
/// Return true if this expression is a dynamic array index, for [`Access`].
///
/// This method returns true if this expression is a dynamically computed
/// index, and as such can only be used to index matrices and arrays when
/// they appear behind a pointer. See the documentation for [`Access`] for
/// details.
///
/// Note, this does not check the _type_ of the given expression. It's up to
/// the caller to establish that the `Access` expression is well-typed
/// through other means, like [`ResolveContext`].
///
/// [`Access`]: crate::Expression::Access
/// [`ResolveContext`]: crate::proc::ResolveContext
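/// For example, a plain (non-specialized) `Constant` index is static, while
/// any computed value, such as a loop counter, is dynamic.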
pub fn is_dynamic_index(&self, module: &crate::Module) -> bool {
if let Self::Constant(handle) = *self {
let constant = &module.constants[handle];
constant.specialization.is_some()
} else {
true
}
}
}
impl crate::SampleLevel {
pub fn implicit_derivatives(&self) -> bool {
match *self {
Self::Auto | Self::Bias(_) => true,
Self::Zero | Self::Exact(_) | Self::Gradient { .. } => false,
}
}
}
impl crate::Constant {
/// Interpret this constant as an array length, and return it as a `u32`.
///
/// Ignore any specialization available for this constant; return its
/// unspecialized value.
///
/// If the constant has an inappropriate kind (non-scalar or non-integer) or
/// value (negative, out of range for u32), return `None`. This usually
/// indicates an error, but only the caller has enough information to report
/// the error helpfully: in back ends, it's a validation error, but in front
/// ends, it may indicate ill-formed input (for example, a SPIR-V
/// `OpArrayType` referring to an inappropriate `OpConstant`). So we return
/// `Option` and let the caller sort things out.
pub(crate) fn to_array_length(&self) -> Option<u32> {
use std::convert::TryInto;
match self.inner {
crate::ConstantInner::Scalar { value, width: _ } => match value {
crate::ScalarValue::Uint(value) => value.try_into().ok(),
// Accept a signed integer size to avoid
// requiring an explicit uint
// literal. Type inference should make
// this unnecessary.
crate::ScalarValue::Sint(value) => value.try_into().ok(),
_ => None,
},
// caught by type validation
crate::ConstantInner::Composite { .. } => None,
}
}
}
impl crate::Binding {
pub fn to_built_in(&self) -> Option<crate::BuiltIn> {
match *self {
Self::BuiltIn(bi) => Some(bi),
Self::Location { .. } => None,
}
}
}
//TODO: should we use an existing crate for hashable floats?
impl PartialEq for crate::ScalarValue {
fn eq(&self, other: &Self) -> bool {
match (*self, *other) {
(Self::Uint(a), Self::Uint(b)) => a == b,
(Self::Sint(a), Self::Sint(b)) => a == b,
(Self::Float(a), Self::Float(b)) => a.to_bits() == b.to_bits(),
(Self::Bool(a), Self::Bool(b)) => a == b,
_ => false,
}
}
}
impl Eq for crate::ScalarValue {}
impl std::hash::Hash for crate::ScalarValue {
fn hash<H: std::hash::Hasher>(&self, hasher: &mut H) {
match *self {
Self::Sint(v) => v.hash(hasher),
Self::Uint(v) => v.hash(hasher),
Self::Float(v) => v.to_bits().hash(hasher),
Self::Bool(v) => v.hash(hasher),
}
}
}
impl super::SwizzleComponent {
pub const XYZW: [Self; 4] = [Self::X, Self::Y, Self::Z, Self::W];
pub fn index(&self) -> u32 {
match *self {
Self::X => 0,
Self::Y => 1,
Self::Z => 2,
Self::W => 3,
}
}
pub fn from_index(idx: u32) -> Self {
match idx {
0 => Self::X,
1 => Self::Y,
2 => Self::Z,
_ => Self::W,
}
}
}
#[test]
fn test_matrix_size() {
let constants = crate::Arena::new();
assert_eq!(
crate::TypeInner::Matrix {
columns: crate::VectorSize::Tri,
rows: crate::VectorSize::Tri,
width: 4
}
.span(&constants),
48
);
}
| {
match *self {
super::TypeInner::Scalar { kind, .. } | super::TypeInner::Vector { kind, .. } => {
Some(kind)
}
super::TypeInner::Matrix { .. } => Some(super::ScalarKind::Float),
_ => None,
}
} |
ops.rs | // Copyright 2015-2017 Brian Smith.
//
// Permission to use, copy, modify, and/or distribute this software for any
// purpose with or without fee is hereby granted, provided that the above
// copyright notice and this permission notice appear in all copies.
//
// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHORS DISCLAIM ALL WARRANTIES
// WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY
// SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
// WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION
// OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
// CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
//! Elliptic curve operations on the birationally equivalent curves Curve25519
//! and Edwards25519.
use crate::{
bssl, error,
limb::{Limb, LIMB_BITS},
};
use core::marker::PhantomData;
// `Elem<T>` is `fe` in curve25519/internal.h.
// `Elem<L>` is `fe_loose` in curve25519/internal.h.
// Keep this in sync with curve25519/internal.h.
#[repr(C)]
pub struct Elem<E: Encoding> {
limbs: [Limb; ELEM_LIMBS], // This is called `v` in the C code.
encoding: PhantomData<E>,
}
pub trait Encoding {}
pub struct T;
impl Encoding for T {}
const ELEM_LIMBS: usize = 5 * 64 / LIMB_BITS;
impl<E: Encoding> Elem<E> {
fn zero() -> Self {
Self {
limbs: Default::default(),
encoding: PhantomData,
}
}
}
impl Elem<T> {
fn negate(&mut self) {
unsafe {
GFp_x25519_fe_neg(self);
}
}
}
// An encoding of a curve point. If on Curve25519, it should be encoded as
// described in Section 5 of [RFC 7748]. If on Edwards25519, it should be
// encoded as described in section 5.1.2 of [RFC 8032].
//
// [RFC 7748] https://tools.ietf.org/html/rfc7748#section-5
// [RFC 8032] https://tools.ietf.org/html/rfc8032#section-5.1.2
pub type EncodedPoint = [u8; ELEM_LEN];
pub const ELEM_LEN: usize = 32;
pub type Scalar = [u8; SCALAR_LEN];
pub const SCALAR_LEN: usize = 32;
pub type UnreducedScalar = [u8; UNREDUCED_SCALAR_LEN];
const UNREDUCED_SCALAR_LEN: usize = SCALAR_LEN * 2;
// Keep this in sync with `ge_p3` in curve25519/internal.h.
#[repr(C)]
pub struct ExtPoint {
x: Elem<T>,
y: Elem<T>,
z: Elem<T>,
t: Elem<T>,
}
impl ExtPoint {
pub fn new_at_infinity() -> Self {
Self {
x: Elem::zero(),
y: Elem::zero(),
z: Elem::zero(),
t: Elem::zero(),
}
}
pub fn from_encoded_point_vartime(encoded: &EncodedPoint) -> Result<Self, error::Unspecified> {
let mut point = Self::new_at_infinity();
Result::from(unsafe { GFp_x25519_ge_frombytes_vartime(&mut point, encoded) })
.map(|()| point)
}
pub fn into_encoded_point(self) -> EncodedPoint { encode_point(self.x, self.y, self.z) }
pub fn invert_vartime(&mut self) {
self.x.negate();
self.t.negate();
}
}
// Keep this in sync with `ge_p2` in curve25519/internal.h.
#[repr(C)]
pub struct Point {
x: Elem<T>,
y: Elem<T>,
z: Elem<T>,
}
impl Point {
pub fn new_at_infinity() -> Self {
Self {
x: Elem::zero(),
y: Elem::zero(),
z: Elem::zero(),
}
}
pub fn into_encoded_point(self) -> EncodedPoint { encode_point(self.x, self.y, self.z) }
}
fn encode_point(x: Elem<T>, y: Elem<T>, z: Elem<T>) -> EncodedPoint {
let mut bytes = [0; ELEM_LEN];
let sign_bit: u8 = unsafe {
let mut recip = Elem::zero();
GFp_x25519_fe_invert(&mut recip, &z);
let mut x_over_z = Elem::zero();
GFp_x25519_fe_mul_ttt(&mut x_over_z, &x, &recip);
let mut y_over_z = Elem::zero();
GFp_x25519_fe_mul_ttt(&mut y_over_z, &y, &recip);
GFp_x25519_fe_tobytes(&mut bytes, &y_over_z);
GFp_x25519_fe_isnegative(&x_over_z)
};
// The preceding computations must execute in constant time, but this
// doesn't need to.
bytes[ELEM_LEN - 1] ^= sign_bit << 7;
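// RFC 8032 stores the sign of x in the most significant bit of the final
// octet; XOR suffices because `GFp_x25519_fe_tobytes` leaves that bit clear.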
bytes
}
extern "C" {
fn GFp_x25519_fe_invert(out: &mut Elem<T>, z: &Elem<T>);
fn GFp_x25519_fe_isnegative(elem: &Elem<T>) -> u8;
fn GFp_x25519_fe_mul_ttt(h: &mut Elem<T>, f: &Elem<T>, g: &Elem<T>);
fn GFp_x25519_fe_neg(f: &mut Elem<T>); | } | fn GFp_x25519_fe_tobytes(bytes: &mut EncodedPoint, elem: &Elem<T>);
fn GFp_x25519_ge_frombytes_vartime(h: &mut ExtPoint, s: &EncodedPoint) -> bssl::Result; |
next.config.js | const withOffline = require('next-offline')
module.exports = withOffline({
workboxOpts: {
swDest: process.env.NEXT_EXPORT
? 'service-worker.js'
: 'static/service-worker.js', | options: {
cacheName: 'offlineCache',
expiration: {
maxEntries: 200,
},
},
},
],
},
experimental: {
async rewrites() {
return [
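// Serving the worker from the site root gives it origin-wide scope; the
// actual file is emitted under /_next/static (a common next-offline setup).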
{
source: '/service-worker.js',
destination: '/_next/static/service-worker.js',
},
]
},
},
}) | runtimeCaching: [
{
urlPattern: /^https?.*/,
handler: 'NetworkFirst', |
test_tokenization_tapas.py | # coding=utf-8
# Copyright 2020 The HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import inspect
import os
import shutil
import tempfile
import unittest
from typing import List
import numpy as np
import pandas as pd
from transformers import AddedToken
from transformers.models.tapas.tokenization_tapas import (
VOCAB_FILES_NAMES,
BasicTokenizer,
TapasTokenizer,
WordpieceTokenizer,
_is_control,
_is_punctuation,
_is_whitespace,
)
from transformers.testing_utils import (
is_pt_tf_cross_test,
require_pandas,
require_scatter,
require_tensorflow_probability,
require_tokenizers,
require_torch,
slow,
)
from ...test_tokenization_common import TokenizerTesterMixin, filter_non_english, merge_model_tokenizer_mappings
@require_tokenizers
@require_pandas
class TapasTokenizationTest(TokenizerTesterMixin, unittest.TestCase):
tokenizer_class = TapasTokenizer
test_rust_tokenizer = False
space_between_special_tokens = True
from_pretrained_filter = filter_non_english
test_seq2seq = False
def get_table(
self,
tokenizer: TapasTokenizer,
length=5,
):
toks = [tokenizer.decode([i], clean_up_tokenization_spaces=False) for i in range(len(tokenizer))]
if length == 0:
data = {}
else:
data = {toks[0]: [toks[tok] for tok in range(1, length)]}
table = pd.DataFrame.from_dict(data)
return table
def get_table_and_query(
self,
tokenizer: TapasTokenizer,
length=5,
):
toks = [tokenizer.decode([i], clean_up_tokenization_spaces=False) for i in range(len(tokenizer))]
table = self.get_table(tokenizer, length=length - 3)
query = " ".join(toks[:3])
return table, query
def get_clean_sequence(
self,
tokenizer: TapasTokenizer,
with_prefix_space=False,
max_length=20,
min_length=5,
empty_table: bool = False,
add_special_tokens: bool = True,
return_table_and_query: bool = False,
):
toks = [tokenizer.decode([i], clean_up_tokenization_spaces=False) for i in range(len(tokenizer))]
if empty_table:
table = pd.DataFrame.from_dict({})
query = " ".join(toks[:min_length])
else:
data = {toks[0]: [toks[tok] for tok in range(1, min_length - 3)]}
table = pd.DataFrame.from_dict(data)
query = " ".join(toks[:3])
output_ids = tokenizer.encode(table, query, add_special_tokens=add_special_tokens)
output_txt = tokenizer.decode(output_ids)
assert len(output_ids) >= min_length, "Update the code to generate the sequences so that they are larger"
assert len(output_ids) <= max_length, "Update the code to generate the sequences so that they are smaller"
if return_table_and_query:
return output_txt, output_ids, table, query
return output_txt, output_ids
def setUp(self):
super().setUp()
vocab_tokens = [
"[UNK]",
"[CLS]",
"[SEP]",
"[PAD]",
"[MASK]",
"want",
"##want",
"##ed",
"wa",
"un",
"runn",
"##ing",
",",
"low",
"lowest",
]
self.vocab_file = os.path.join(self.tmpdirname, VOCAB_FILES_NAMES["vocab_file"])
with open(self.vocab_file, "w", encoding="utf-8") as vocab_writer:
vocab_writer.write("".join([x + "\n" for x in vocab_tokens]))
def get_input_output_texts(self, tokenizer):
input_text = "UNwant\u00E9d,running"
output_text = "unwanted, running"
return input_text, output_text
@require_tensorflow_probability
def test_tf_encode_plus_sent_to_model(self):
super().test_tf_encode_plus_sent_to_model()
def test_rust_and_python_full_tokenizers(self):
if not self.test_rust_tokenizer:
return
tokenizer = self.get_tokenizer()
rust_tokenizer = self.get_rust_tokenizer()
sequence = "UNwant\u00E9d,running"
tokens = tokenizer.tokenize(sequence)
rust_tokens = rust_tokenizer.tokenize(sequence)
self.assertListEqual(tokens, rust_tokens)
ids = tokenizer.encode(sequence, add_special_tokens=False)
rust_ids = rust_tokenizer.encode(sequence, add_special_tokens=False)
self.assertListEqual(ids, rust_ids)
rust_tokenizer = self.get_rust_tokenizer()
ids = tokenizer.encode(sequence)
rust_ids = rust_tokenizer.encode(sequence)
self.assertListEqual(ids, rust_ids)
# With lower casing
tokenizer = self.get_tokenizer(do_lower_case=True)
rust_tokenizer = self.get_rust_tokenizer(do_lower_case=True)
sequence = "UNwant\u00E9d,running"
tokens = tokenizer.tokenize(sequence)
rust_tokens = rust_tokenizer.tokenize(sequence)
self.assertListEqual(tokens, rust_tokens)
ids = tokenizer.encode(sequence, add_special_tokens=False)
rust_ids = rust_tokenizer.encode(sequence, add_special_tokens=False)
self.assertListEqual(ids, rust_ids)
rust_tokenizer = self.get_rust_tokenizer()
ids = tokenizer.encode(sequence)
rust_ids = rust_tokenizer.encode(sequence)
self.assertListEqual(ids, rust_ids)
def test_chinese(self):
tokenizer = BasicTokenizer()
self.assertListEqual(tokenizer.tokenize("ah\u535A\u63A8zz"), ["ah", "\u535A", "\u63A8", "zz"])
def test_basic_tokenizer_lower(self):
tokenizer = BasicTokenizer(do_lower_case=True)
self.assertListEqual(
tokenizer.tokenize(" \tHeLLo!how \n Are yoU? "), ["hello", "!", "how", "are", "you", "?"]
)
self.assertListEqual(tokenizer.tokenize("H\u00E9llo"), ["hello"])
def test_basic_tokenizer_lower_strip_accents_false(self):
tokenizer = BasicTokenizer(do_lower_case=True, strip_accents=False)
self.assertListEqual(
tokenizer.tokenize(" \tHäLLo!how \n Are yoU? "), ["hällo", "!", "how", "are", "you", "?"]
)
self.assertListEqual(tokenizer.tokenize("H\u00E9llo"), ["h\u00E9llo"])
def test_basic_tokenizer_lower_strip_accents_true(self):
tokenizer = BasicTokenizer(do_lower_case=True, strip_accents=True)
self.assertListEqual(
tokenizer.tokenize(" \tHäLLo!how \n Are yoU? "), ["hallo", "!", "how", "are", "you", "?"]
)
self.assertListEqual(tokenizer.tokenize("H\u00E9llo"), ["hello"])
def test_basic_tokenizer_lower_strip_accents_default(self):
tokenizer = BasicTokenizer(do_lower_case=True)
self.assertListEqual(
tokenizer.tokenize(" \tHäLLo!how \n Are yoU? "), ["hallo", "!", "how", "are", "you", "?"]
)
self.assertListEqual(tokenizer.tokenize("H\u00E9llo"), ["hello"])
def test_basic_tokenizer_no_lower(self):
tokenizer = BasicTokenizer(do_lower_case=False)
self.assertListEqual(
tokenizer.tokenize(" \tHeLLo!how \n Are yoU? "), ["HeLLo", "!", "how", "Are", "yoU", "?"]
)
def test_basic_tokenizer_no_lower_strip_accents_false(self):
tokenizer = BasicTokenizer(do_lower_case=False, strip_accents=False)
self.assertListEqual(
tokenizer.tokenize(" \tHäLLo!how \n Are yoU? "), ["HäLLo", "!", "how", "Are", "yoU", "?"]
)
def test_basic_tokenizer_no_lower_strip_accents_true(self):
tokenizer = BasicTokenizer(do_lower_case=False, strip_accents=True)
self.assertListEqual(
tokenizer.tokenize(" \tHäLLo!how \n Are yoU? "), ["HaLLo", "!", "how", "Are", "yoU", "?"]
)
def test_basic_tokenizer_respects_never_split_tokens(self):
tokenizer = BasicTokenizer(do_lower_case=False, never_split=["[UNK]"])
self.assertListEqual(
tokenizer.tokenize(" \tHeLLo!how \n Are yoU? [UNK]"), ["HeLLo", "!", "how", "Are", "yoU", "?", "[UNK]"]
)
def test_wordpiece_tokenizer(self):
vocab_tokens = ["[UNK]", "[CLS]", "[SEP]", "want", "##want", "##ed", "wa", "un", "runn", "##ing"]
vocab = {}
for i, token in enumerate(vocab_tokens):
vocab[token] = i
tokenizer = WordpieceTokenizer(vocab=vocab, unk_token="[UNK]")
self.assertListEqual(tokenizer.tokenize(""), [])
self.assertListEqual(tokenizer.tokenize("unwanted running"), ["un", "##want", "##ed", "runn", "##ing"])
self.assertListEqual(tokenizer.tokenize("unwantedX running"), ["[UNK]", "runn", "##ing"])
def test_is_whitespace(self):
self.assertTrue(_is_whitespace(" "))
self.assertTrue(_is_whitespace("\t"))
self.assertTrue(_is_whitespace("\r"))
self.assertTrue(_is_whitespace("\n"))
self.assertTrue(_is_whitespace("\u00A0"))
self.assertFalse(_is_whitespace("A"))
self.assertFalse(_is_whitespace("-"))
def test_is_control(self):
self.assertTrue(_is_control("\u0005"))
self.assertFalse(_is_control("A"))
self.assertFalse(_is_control(" "))
self.assertFalse(_is_control("\t"))
self.assertFalse(_is_control("\r"))
def test_is |
self.assertTrue(_is_punctuation("-"))
self.assertTrue(_is_punctuation("$"))
self.assertTrue(_is_punctuation("`"))
self.assertTrue(_is_punctuation("."))
self.assertFalse(_is_punctuation("A"))
self.assertFalse(_is_punctuation(" "))
def test_clean_text(self):
tokenizer = self.get_tokenizer()
# Example taken from the issue https://github.com/huggingface/tokenizers/issues/340
self.assertListEqual(
[tokenizer.tokenize(t) for t in ["Test", "\xad", "test"]], [["[UNK]"], ["[EMPTY]"], ["[UNK]"]]
)
@slow
def test_sequence_builders(self):
tokenizer = self.tokenizer_class.from_pretrained("google/tapas-base-finetuned-wtq")
empty_table = self.get_table(tokenizer, length=0)
table = self.get_table(tokenizer, length=10)
text = tokenizer.encode(table, add_special_tokens=False)
text_2 = tokenizer.encode(empty_table, "multi-sequence build", add_special_tokens=False)
encoded_pair = tokenizer.build_inputs_with_special_tokens(text, text_2)
assert encoded_pair == [101] + text + [102] + text_2
def test_offsets_with_special_characters(self):
for tokenizer, pretrained_name, kwargs in self.tokenizers_list:
with self.subTest(f"{tokenizer.__class__.__name__} ({pretrained_name})"):
tokenizer_r = self.rust_tokenizer_class.from_pretrained(pretrained_name, **kwargs)
sentence = f"A, naïve {tokenizer_r.mask_token} AllenNLP sentence."
tokens = tokenizer_r.encode_plus(
sentence,
return_attention_mask=False,
return_token_type_ids=False,
return_offsets_mapping=True,
add_special_tokens=True,
)
do_lower_case = tokenizer_r.do_lower_case if hasattr(tokenizer_r, "do_lower_case") else False
expected_results = (
[
((0, 0), tokenizer_r.cls_token),
((0, 1), "A"),
((1, 2), ","),
((3, 5), "na"),
((5, 6), "##ï"),
((6, 8), "##ve"),
((9, 15), tokenizer_r.mask_token),
((16, 21), "Allen"),
((21, 23), "##NL"),
((23, 24), "##P"),
((25, 33), "sentence"),
((33, 34), "."),
((0, 0), tokenizer_r.sep_token),
]
if not do_lower_case
else [
((0, 0), tokenizer_r.cls_token),
((0, 1), "a"),
((1, 2), ","),
((3, 8), "naive"),
((9, 15), tokenizer_r.mask_token),
((16, 21), "allen"),
((21, 23), "##nl"),
((23, 24), "##p"),
((25, 33), "sentence"),
((33, 34), "."),
((0, 0), tokenizer_r.sep_token),
]
)
self.assertEqual(
[e[1] for e in expected_results], tokenizer_r.convert_ids_to_tokens(tokens["input_ids"])
)
self.assertEqual([e[0] for e in expected_results], tokens["offset_mapping"])
def test_add_special_tokens(self):
tokenizers: List[TapasTokenizer] = self.get_tokenizers(do_lower_case=False)
for tokenizer in tokenizers:
with self.subTest(f"{tokenizer.__class__.__name__}"):
input_table = self.get_table(tokenizer, length=0)
special_token = "[SPECIAL_TOKEN]"
tokenizer.add_special_tokens({"cls_token": special_token})
encoded_special_token = tokenizer.encode(input_table, special_token, add_special_tokens=False)
self.assertEqual(len(encoded_special_token), 1)
decoded = tokenizer.decode(encoded_special_token, skip_special_tokens=True)
self.assertTrue(special_token not in decoded)
def test_add_tokens_tokenizer(self):
tokenizers: List[TapasTokenizer] = self.get_tokenizers(do_lower_case=False)
for tokenizer in tokenizers:
with self.subTest(f"{tokenizer.__class__.__name__}"):
table = self.get_table(tokenizer, length=0)
vocab_size = tokenizer.vocab_size
all_size = len(tokenizer)
self.assertNotEqual(vocab_size, 0)
# We usually have added tokens from the start in tests because our vocab fixtures are
# smaller than the original vocabs - let's not assert this
# self.assertEqual(vocab_size, all_size)
new_toks = ["aaaaa bbbbbb", "cccccccccdddddddd"]
added_toks = tokenizer.add_tokens(new_toks)
vocab_size_2 = tokenizer.vocab_size
all_size_2 = len(tokenizer)
self.assertNotEqual(vocab_size_2, 0)
self.assertEqual(vocab_size, vocab_size_2)
self.assertEqual(added_toks, len(new_toks))
self.assertEqual(all_size_2, all_size + len(new_toks))
tokens = tokenizer.encode(table, "aaaaa bbbbbb low cccccccccdddddddd l", add_special_tokens=False)
self.assertGreaterEqual(len(tokens), 4)
self.assertGreater(tokens[0], tokenizer.vocab_size - 1)
self.assertGreater(tokens[-2], tokenizer.vocab_size - 1)
new_toks_2 = {"eos_token": ">>>>|||<||<<|<<", "pad_token": "<<<<<|||>|>>>>|>"}
added_toks_2 = tokenizer.add_special_tokens(new_toks_2)
vocab_size_3 = tokenizer.vocab_size
all_size_3 = len(tokenizer)
self.assertNotEqual(vocab_size_3, 0)
self.assertEqual(vocab_size, vocab_size_3)
self.assertEqual(added_toks_2, len(new_toks_2))
self.assertEqual(all_size_3, all_size_2 + len(new_toks_2))
tokens = tokenizer.encode(
table,
">>>>|||<||<<|<< aaaaabbbbbb low cccccccccdddddddd <<<<<|||>|>>>>|> l",
add_special_tokens=False,
)
self.assertGreaterEqual(len(tokens), 6)
self.assertGreater(tokens[0], tokenizer.vocab_size - 1)
self.assertGreater(tokens[0], tokens[1])
self.assertGreater(tokens[-2], tokenizer.vocab_size - 1)
self.assertGreater(tokens[-2], tokens[-3])
self.assertEqual(tokens[0], tokenizer.eos_token_id)
self.assertEqual(tokens[-2], tokenizer.pad_token_id)
@require_tokenizers
def test_encode_decode_with_spaces(self):
tokenizers = self.get_tokenizers(do_lower_case=False)
for tokenizer in tokenizers:
with self.subTest(f"{tokenizer.__class__.__name__}"):
table = self.get_table(tokenizer, length=0)
new_toks = [AddedToken("[ABC]", normalized=False), AddedToken("[DEF]", normalized=False)]
tokenizer.add_tokens(new_toks)
input = "[ABC][DEF][ABC][DEF]"
if self.space_between_special_tokens:
output = "[ABC] [DEF] [ABC] [DEF]"
else:
output = input
encoded = tokenizer.encode(table, input, add_special_tokens=False)
decoded = tokenizer.decode(encoded, spaces_between_special_tokens=self.space_between_special_tokens)
self.assertIn(decoded, [output, output.lower()])
def test_encode_plus_with_padding(self):
tokenizers = self.get_tokenizers(do_lower_case=False)
for tokenizer in tokenizers:
with self.subTest(f"{tokenizer.__class__.__name__}"):
table = self.get_table(tokenizer, length=0)
sequence = "Sequence"
# check correct behaviour if no pad_token_id exists and add it eventually
self._check_no_pad_token_padding(tokenizer, sequence)
padding_size = 10
padding_idx = tokenizer.pad_token_id
token_type_padding_idx = tokenizer.pad_token_type_id
encoded_sequence = tokenizer.encode_plus(table, sequence, return_special_tokens_mask=True)
input_ids = encoded_sequence["input_ids"]
special_tokens_mask = encoded_sequence["special_tokens_mask"]
sequence_length = len(input_ids)
# Test 'longest' and 'no_padding' don't do anything
tokenizer.padding_side = "right"
not_padded_sequence = tokenizer.encode_plus(
table,
sequence,
padding=False,
return_special_tokens_mask=True,
)
not_padded_input_ids = not_padded_sequence["input_ids"]
not_padded_special_tokens_mask = not_padded_sequence["special_tokens_mask"]
not_padded_sequence_length = len(not_padded_input_ids)
assert sequence_length == not_padded_sequence_length
assert input_ids == not_padded_input_ids
assert special_tokens_mask == not_padded_special_tokens_mask
not_padded_sequence = tokenizer.encode_plus(
table,
sequence,
padding=False,
return_special_tokens_mask=True,
)
not_padded_input_ids = not_padded_sequence["input_ids"]
not_padded_special_tokens_mask = not_padded_sequence["special_tokens_mask"]
not_padded_sequence_length = len(not_padded_input_ids)
assert sequence_length == not_padded_sequence_length
assert input_ids == not_padded_input_ids
assert special_tokens_mask == not_padded_special_tokens_mask
# Test right padding
tokenizer.padding_side = "right"
right_padded_sequence = tokenizer.encode_plus(
table,
sequence,
max_length=sequence_length + padding_size,
padding="max_length",
return_special_tokens_mask=True,
)
right_padded_input_ids = right_padded_sequence["input_ids"]
right_padded_special_tokens_mask = right_padded_sequence["special_tokens_mask"]
right_padded_sequence_length = len(right_padded_input_ids)
assert sequence_length + padding_size == right_padded_sequence_length
assert input_ids + [padding_idx] * padding_size == right_padded_input_ids
assert special_tokens_mask + [1] * padding_size == right_padded_special_tokens_mask
# Test left padding
tokenizer.padding_side = "left"
left_padded_sequence = tokenizer.encode_plus(
table,
sequence,
max_length=sequence_length + padding_size,
padding="max_length",
return_special_tokens_mask=True,
)
left_padded_input_ids = left_padded_sequence["input_ids"]
left_padded_special_tokens_mask = left_padded_sequence["special_tokens_mask"]
left_padded_sequence_length = len(left_padded_input_ids)
assert sequence_length + padding_size == left_padded_sequence_length
assert [padding_idx] * padding_size + input_ids == left_padded_input_ids
assert [1] * padding_size + special_tokens_mask == left_padded_special_tokens_mask
if "token_type_ids" in tokenizer.model_input_names:
token_type_ids = encoded_sequence["token_type_ids"]
left_padded_token_type_ids = left_padded_sequence["token_type_ids"]
right_padded_token_type_ids = right_padded_sequence["token_type_ids"]
assert (
token_type_ids + [[token_type_padding_idx] * 7] * padding_size == right_padded_token_type_ids
)
assert [[token_type_padding_idx] * 7] * padding_size + token_type_ids == left_padded_token_type_ids
if "attention_mask" in tokenizer.model_input_names:
attention_mask = encoded_sequence["attention_mask"]
right_padded_attention_mask = right_padded_sequence["attention_mask"]
left_padded_attention_mask = left_padded_sequence["attention_mask"]
assert attention_mask + [0] * padding_size == right_padded_attention_mask
assert [0] * padding_size + attention_mask == left_padded_attention_mask
def test_internal_consistency(self):
tokenizers = self.get_tokenizers()
for tokenizer in tokenizers:
with self.subTest(f"{tokenizer.__class__.__name__}"):
table = self.get_table(tokenizer, length=0)
input_text, output_text = self.get_input_output_texts(tokenizer)
tokens = tokenizer.tokenize(input_text)
ids = tokenizer.convert_tokens_to_ids(tokens)
ids_2 = tokenizer.encode(table, input_text, add_special_tokens=False)
self.assertListEqual(ids, ids_2)
tokens_2 = tokenizer.convert_ids_to_tokens(ids)
self.assertNotEqual(len(tokens_2), 0)
text_2 = tokenizer.decode(ids)
self.assertIsInstance(text_2, str)
self.assertEqual(text_2, output_text)
def test_mask_output(self):
tokenizers = self.get_tokenizers(fast=False, do_lower_case=False)
for tokenizer in tokenizers:
with self.subTest(f"{tokenizer.__class__.__name__}"):
table, query = self.get_table_and_query(tokenizer)
if (
tokenizer.build_inputs_with_special_tokens.__qualname__.split(".")[0] != "PreTrainedTokenizer"
and "token_type_ids" in tokenizer.model_input_names
):
information = tokenizer.encode_plus(table, query, add_special_tokens=True)
sequences, mask = information["input_ids"], information["token_type_ids"]
self.assertEqual(len(sequences), len(mask))
@unittest.skip("TAPAS tokenizer only handles two sequences.")
def test_maximum_encoding_length_pair_input(self):
pass
@unittest.skip("TAPAS tokenizer only handles two sequences.")
def test_maximum_encoding_length_single_input(self):
pass
def test_number_of_added_tokens(self):
tokenizers = self.get_tokenizers(do_lower_case=False)
for tokenizer in tokenizers:
with self.subTest(f"{tokenizer.__class__.__name__}"):
table, query = self.get_table_and_query(tokenizer)
sequences = tokenizer.encode(table, query, add_special_tokens=False)
attached_sequences = tokenizer.encode(table, query, add_special_tokens=True)
# Method is implemented (e.g. not GPT-2)
if len(attached_sequences) != 2:
self.assertEqual(
tokenizer.num_special_tokens_to_add(pair=True), len(attached_sequences) - len(sequences)
)
def test_padding_to_max_length(self):
"""We keep this test for backward compatibility but it should be removed when `pad_to_max_length` will be deprecated"""
tokenizers = self.get_tokenizers(do_lower_case=False)
for tokenizer in tokenizers:
with self.subTest(f"{tokenizer.__class__.__name__}"):
table = self.get_table(tokenizer)
sequence = "Sequence"
padding_size = 10
# check correct behaviour if no pad_token_id exists and add it eventually
self._check_no_pad_token_padding(tokenizer, sequence)
padding_idx = tokenizer.pad_token_id
# Check that it correctly pads when a maximum length is specified along with the padding flag set to True
tokenizer.padding_side = "right"
encoded_sequence = tokenizer.encode(table, sequence)
sequence_length = len(encoded_sequence)
# FIXME: the next line should be padding(max_length) to avoid warning
padded_sequence = tokenizer.encode(
table, sequence, max_length=sequence_length + padding_size, padding=True
)
padded_sequence_length = len(padded_sequence)
assert sequence_length + padding_size == padded_sequence_length
assert encoded_sequence + [padding_idx] * padding_size == padded_sequence
# Check that nothing is done when a maximum length is not specified
encoded_sequence = tokenizer.encode(table, sequence)
sequence_length = len(encoded_sequence)
tokenizer.padding_side = "right"
padded_sequence_right = tokenizer.encode(table, sequence, pad_to_max_length=True)
padded_sequence_right_length = len(padded_sequence_right)
assert sequence_length == padded_sequence_right_length
assert encoded_sequence == padded_sequence_right
def test_call(self):
# Tests that all call wrap to encode_plus and batch_encode_plus
tokenizers = self.get_tokenizers(do_lower_case=False)
for tokenizer in tokenizers:
with self.subTest(f"{tokenizer.__class__.__name__}"):
sequences = [
"Testing batch encode plus",
"Testing batch encode plus with different sequence lengths",
"Testing batch encode plus with different sequence lengths correctly pads",
]
# Test not batched
table = self.get_table(tokenizer, length=0)
encoded_sequences_1 = tokenizer.encode_plus(table, sequences[0])
encoded_sequences_2 = tokenizer(table, sequences[0])
self.assertEqual(encoded_sequences_1, encoded_sequences_2)
# Test not batched pairs
table = self.get_table(tokenizer, length=10)
encoded_sequences_1 = tokenizer.encode_plus(table, sequences[1])
encoded_sequences_2 = tokenizer(table, sequences[1])
self.assertEqual(encoded_sequences_1, encoded_sequences_2)
# Test batched
table = self.get_table(tokenizer, length=0)
encoded_sequences_1 = tokenizer.batch_encode_plus(table, sequences)
encoded_sequences_2 = tokenizer(table, sequences)
self.assertEqual(encoded_sequences_1, encoded_sequences_2)
def test_batch_encode_plus_batch_sequence_length(self):
# Tests that all encoded values have the correct size
tokenizers = self.get_tokenizers(do_lower_case=False)
for tokenizer in tokenizers:
with self.subTest(f"{tokenizer.__class__.__name__}"):
table = self.get_table(tokenizer, length=0)
sequences = [
"Testing batch encode plus",
"Testing batch encode plus with different sequence lengths",
"Testing batch encode plus with different sequence lengths correctly pads",
]
encoded_sequences = [tokenizer.encode_plus(table, sequence) for sequence in sequences]
encoded_sequences_batch = tokenizer.batch_encode_plus(table, sequences, padding=False)
self.assertListEqual(
encoded_sequences, self.convert_batch_encode_plus_format_to_encode_plus(encoded_sequences_batch)
)
maximum_length = len(
max([encoded_sequence["input_ids"] for encoded_sequence in encoded_sequences], key=len)
)
# check correct behaviour if no pad_token_id exists and add it eventually
self._check_no_pad_token_padding(tokenizer, sequences)
encoded_sequences_padded = [
tokenizer.encode_plus(table, sequence, max_length=maximum_length, padding="max_length")
for sequence in sequences
]
encoded_sequences_batch_padded = tokenizer.batch_encode_plus(table, sequences, padding=True)
self.assertListEqual(
encoded_sequences_padded,
self.convert_batch_encode_plus_format_to_encode_plus(encoded_sequences_batch_padded),
)
# check 'longest' is insensitive to a max length
encoded_sequences_batch_padded_1 = tokenizer.batch_encode_plus(table, sequences, padding=True)
encoded_sequences_batch_padded_2 = tokenizer.batch_encode_plus(
table, sequences, max_length=maximum_length + 10, padding="longest"
)
for key in encoded_sequences_batch_padded_1.keys():
self.assertListEqual(
encoded_sequences_batch_padded_1[key],
encoded_sequences_batch_padded_2[key],
)
# check 'no_padding' is insensitive to a max length
encoded_sequences_batch_padded_1 = tokenizer.batch_encode_plus(table, sequences, padding=False)
encoded_sequences_batch_padded_2 = tokenizer.batch_encode_plus(
table, sequences, max_length=maximum_length + 10, padding=False
)
for key in encoded_sequences_batch_padded_1.keys():
self.assertListEqual(
encoded_sequences_batch_padded_1[key],
encoded_sequences_batch_padded_2[key],
)
@unittest.skip("batch_encode_plus does not handle overflowing tokens.")
def test_batch_encode_plus_overflowing_tokens(self):
pass
def test_batch_encode_plus_padding(self):
# Test that padded sequences are equivalent between batch_encode_plus and encode_plus
# Right padding tests
tokenizers = self.get_tokenizers(do_lower_case=False)
for tokenizer in tokenizers:
with self.subTest(f"{tokenizer.__class__.__name__}"):
table = self.get_table(tokenizer, length=0)
sequences = [
"Testing batch encode plus",
"Testing batch encode plus with different sequence lengths",
"Testing batch encode plus with different sequence lengths correctly pads",
]
max_length = 100
# check correct behaviour if no pad_token_id exists and add it eventually
self._check_no_pad_token_padding(tokenizer, sequences)
encoded_sequences = [
tokenizer.encode_plus(table, sequence, max_length=max_length, padding="max_length")
for sequence in sequences
]
encoded_sequences_batch = tokenizer.batch_encode_plus(
table, sequences, max_length=max_length, padding="max_length"
)
self.assertListEqual(
encoded_sequences, self.convert_batch_encode_plus_format_to_encode_plus(encoded_sequences_batch)
)
# Left padding tests
tokenizers = self.get_tokenizers(do_lower_case=False)
for tokenizer in tokenizers:
with self.subTest(f"{tokenizer.__class__.__name__}"):
tokenizer.padding_side = "left"
sequences = [
"Testing batch encode plus",
"Testing batch encode plus with different sequence lengths",
"Testing batch encode plus with different sequence lengths correctly pads",
]
max_length = 100
# check correct behaviour if no pad_token_id exists and add it eventually
self._check_no_pad_token_padding(tokenizer, sequences)
encoded_sequences = [
tokenizer.encode_plus(table, sequence, max_length=max_length, padding="max_length")
for sequence in sequences
]
encoded_sequences_batch = tokenizer.batch_encode_plus(
table, sequences, max_length=max_length, padding="max_length"
)
self.assertListEqual(
encoded_sequences, self.convert_batch_encode_plus_format_to_encode_plus(encoded_sequences_batch)
)
def test_padding_to_multiple_of(self):
tokenizers = self.get_tokenizers()
for tokenizer in tokenizers:
with self.subTest(f"{tokenizer.__class__.__name__}"):
table = self.get_table(tokenizer, length=0)
if tokenizer.pad_token is None:
self.skipTest("No padding token.")
else:
empty_tokens = tokenizer(table, padding=True, pad_to_multiple_of=8)
normal_tokens = tokenizer(table, "This is a sample input", padding=True, pad_to_multiple_of=8)
for key, value in empty_tokens.items():
self.assertEqual(len(value) % 8, 0, f"BatchEncoding.{key} is not multiple of 8")
for key, value in normal_tokens.items():
self.assertEqual(len(value) % 8, 0, f"BatchEncoding.{key} is not multiple of 8")
normal_tokens = tokenizer(table, "This", pad_to_multiple_of=8)
for key, value in normal_tokens.items():
self.assertNotEqual(len(value) % 8, 0, f"BatchEncoding.{key} is not multiple of 8")
# Should also work with truncation
normal_tokens = tokenizer(table, "This", padding=True, truncation=True, pad_to_multiple_of=8)
for key, value in normal_tokens.items():
self.assertEqual(len(value) % 8, 0, f"BatchEncoding.{key} is not multiple of 8")
@unittest.skip("TAPAS cannot handle `prepare_for_model` without passing by `encode_plus` or `batch_encode_plus`")
def test_prepare_for_model(self):
pass
def test_tokenizer_slow_store_full_signature(self):
signature = inspect.signature(self.tokenizer_class.__init__)
tokenizer = self.get_tokenizer()
for parameter_name, parameter in signature.parameters.items():
if parameter.default != inspect.Parameter.empty:
self.assertIn(parameter_name, tokenizer.init_kwargs)
def test_special_tokens_mask_input_pairs(self):
tokenizers = self.get_tokenizers(do_lower_case=False)
for tokenizer in tokenizers:
with self.subTest(f"{tokenizer.__class__.__name__}"):
sequence_0 = "Encode this."
empty_table = self.get_table(tokenizer, length=0)
table = self.get_table(tokenizer, length=10)
encoded_sequence = tokenizer.encode(empty_table, sequence_0, add_special_tokens=False)
encoded_sequence += tokenizer.encode(table, "", add_special_tokens=False)
encoded_sequence_dict = tokenizer.encode_plus(
table,
sequence_0,
add_special_tokens=True,
return_special_tokens_mask=True,
# add_prefix_space=False,
)
encoded_sequence_w_special = encoded_sequence_dict["input_ids"]
special_tokens_mask = encoded_sequence_dict["special_tokens_mask"]
self.assertEqual(len(special_tokens_mask), len(encoded_sequence_w_special))
filtered_sequence = [
(x if not special_tokens_mask[i] else None) for i, x in enumerate(encoded_sequence_w_special)
]
filtered_sequence = [x for x in filtered_sequence if x is not None]
self.assertEqual(encoded_sequence, filtered_sequence)
def test_special_tokens_mask(self):
tokenizers = self.get_tokenizers(do_lower_case=False)
for tokenizer in tokenizers:
with self.subTest(f"{tokenizer.__class__.__name__}"):
table = self.get_table(tokenizer, length=0)
sequence_0 = "Encode this."
# Testing single inputs
encoded_sequence = tokenizer.encode(table, sequence_0, add_special_tokens=False)
encoded_sequence_dict = tokenizer.encode_plus(
table, sequence_0, add_special_tokens=True, return_special_tokens_mask=True
)
encoded_sequence_w_special = encoded_sequence_dict["input_ids"]
special_tokens_mask = encoded_sequence_dict["special_tokens_mask"]
self.assertEqual(len(special_tokens_mask), len(encoded_sequence_w_special))
filtered_sequence = [x for i, x in enumerate(encoded_sequence_w_special) if not special_tokens_mask[i]]
self.assertEqual(encoded_sequence, filtered_sequence)
def test_save_and_load_tokenizer(self):
# safety check on max_len default value so we are sure the test works
tokenizers = self.get_tokenizers()
for tokenizer in tokenizers:
with self.subTest(f"{tokenizer.__class__.__name__}"):
self.assertNotEqual(tokenizer.model_max_length, 42)
# Now let's start the test
tokenizers = self.get_tokenizers()
for tokenizer in tokenizers:
with self.subTest(f"{tokenizer.__class__.__name__}"):
# Isolate this from the other tests because we save additional tokens/etc
table = self.get_table(tokenizer, length=0)
tmpdirname = tempfile.mkdtemp()
sample_text = " He is very happy, UNwant\u00E9d,running"
before_tokens = tokenizer.encode(table, sample_text, add_special_tokens=False)
before_vocab = tokenizer.get_vocab()
tokenizer.save_pretrained(tmpdirname)
after_tokenizer = tokenizer.__class__.from_pretrained(tmpdirname)
after_tokens = after_tokenizer.encode(table, sample_text, add_special_tokens=False)
after_vocab = after_tokenizer.get_vocab()
self.assertListEqual(before_tokens, after_tokens)
self.assertDictEqual(before_vocab, after_vocab)
shutil.rmtree(tmpdirname)
@unittest.skip("Not implemented")
def test_right_and_left_truncation(self):
pass
def test_right_and_left_padding(self):
tokenizers = self.get_tokenizers(do_lower_case=False)
for tokenizer in tokenizers:
with self.subTest(f"{tokenizer.__class__.__name__}"):
table = self.get_table(tokenizer, length=0)
sequence = "Sequence"
padding_size = 10
# check correct behaviour if no pad_token_id exists and add it if needed
self._check_no_pad_token_padding(tokenizer, sequence)
padding_idx = tokenizer.pad_token_id
# RIGHT PADDING - Check that it correctly pads when a maximum length is specified along with the padding flag set to True
tokenizer.padding_side = "right"
encoded_sequence = tokenizer.encode(table, sequence)
sequence_length = len(encoded_sequence)
padded_sequence = tokenizer.encode(
table, sequence, max_length=sequence_length + padding_size, padding="max_length"
)
padded_sequence_length = len(padded_sequence)
assert sequence_length + padding_size == padded_sequence_length
assert encoded_sequence + [padding_idx] * padding_size == padded_sequence
# LEFT PADDING - Check that it correctly pads when a maximum length is specified along with the padding flag set to True
tokenizer.padding_side = "left"
encoded_sequence = tokenizer.encode(table, sequence)
sequence_length = len(encoded_sequence)
padded_sequence = tokenizer.encode(
table, sequence, max_length=sequence_length + padding_size, padding="max_length"
)
padded_sequence_length = len(padded_sequence)
assert sequence_length + padding_size == padded_sequence_length
assert [padding_idx] * padding_size + encoded_sequence == padded_sequence
# RIGHT & LEFT PADDING - Check that nothing is done for 'longest' and 'no_padding'
encoded_sequence = tokenizer.encode(table, sequence)
sequence_length = len(encoded_sequence)
tokenizer.padding_side = "right"
padded_sequence_right = tokenizer.encode(table, sequence, padding=True)
padded_sequence_right_length = len(padded_sequence_right)
assert sequence_length == padded_sequence_right_length
assert encoded_sequence == padded_sequence_right
tokenizer.padding_side = "left"
padded_sequence_left = tokenizer.encode(table, sequence, padding="longest")
padded_sequence_left_length = len(padded_sequence_left)
assert sequence_length == padded_sequence_left_length
assert encoded_sequence == padded_sequence_left
tokenizer.padding_side = "right"
padded_sequence_right = tokenizer.encode(table, sequence)
padded_sequence_right_length = len(padded_sequence_right)
assert sequence_length == padded_sequence_right_length
assert encoded_sequence == padded_sequence_right
tokenizer.padding_side = "left"
padded_sequence_left = tokenizer.encode(table, sequence, padding=False)
padded_sequence_left_length = len(padded_sequence_left)
assert sequence_length == padded_sequence_left_length
assert encoded_sequence == padded_sequence_left
def test_token_type_ids(self):
tokenizers = self.get_tokenizers()
for tokenizer in tokenizers:
with self.subTest(f"{tokenizer.__class__.__name__}"):
empty_table = self.get_table(tokenizer, length=0)
seq_0 = "Test this method."
# We want sequence 0 and sequence 1 to be tagged with
# 0 and 1 token_ids respectively
# (regardless of whether the model uses token type ids).
# We rely on this assumption in the QA pipeline among other places.
output = tokenizer(empty_table, seq_0, return_token_type_ids=True)
# Assert that the token type IDs have the same length as the input IDs
self.assertEqual(len(output["token_type_ids"]), len(output["input_ids"]))
# Assert that each token type ID has 7 values
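# (For TAPAS these seven dimensions are: segment_ids, column_ids, row_ids,
# prev_labels, column_ranks, inv_column_ranks and numeric_relations.)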
self.assertTrue(all(len(token_type_ids) == 7 for token_type_ids in output["token_type_ids"]))
# Do the same test as modeling common.
self.assertIn(0, output["token_type_ids"][0])
@require_torch
@slow
@require_scatter
def test_torch_encode_plus_sent_to_model(self):
import torch
from transformers import MODEL_MAPPING, TOKENIZER_MAPPING
MODEL_TOKENIZER_MAPPING = merge_model_tokenizer_mappings(MODEL_MAPPING, TOKENIZER_MAPPING)
tokenizers = self.get_tokenizers(do_lower_case=False)
for tokenizer in tokenizers:
with self.subTest(f"{tokenizer.__class__.__name__}"):
if tokenizer.__class__ not in MODEL_TOKENIZER_MAPPING:
return
config_class, model_class = MODEL_TOKENIZER_MAPPING[tokenizer.__class__]
config = config_class()
if config.is_encoder_decoder or config.pad_token_id is None:
return
model = model_class(config)
# Make sure the model contains at least the full vocabulary size in its embedding matrix
is_using_common_embeddings = hasattr(model.get_input_embeddings(), "weight")
assert (
(model.get_input_embeddings().weight.shape[0] >= len(tokenizer))
if is_using_common_embeddings
else True
)
# Build sequence
first_ten_tokens = list(tokenizer.get_vocab().keys())[:10]
sequence = " ".join(first_ten_tokens)
table = self.get_table(tokenizer, length=0)
encoded_sequence = tokenizer.encode_plus(table, sequence, return_tensors="pt")
batch_encoded_sequence = tokenizer.batch_encode_plus(table, [sequence, sequence], return_tensors="pt")
# This should not fail
with torch.no_grad(): # saves some time
model(**encoded_sequence)
model(**batch_encoded_sequence)
@unittest.skip("TAPAS doesn't handle pre-tokenized inputs.")
def test_pretokenized_inputs(self):
pass
@slow
def test_tapas_truncation_integration_test(self):
data = {
"Actors": ["Brad Pitt", "Leonardo Di Caprio", "George Clooney"],
"Age": ["56", "45", "59"],
"Number of movies": ["87", "53", "69"],
"Date of birth": ["18 december 1963", "11 november 1974", "6 may 1961"],
}
queries = [
"When was Brad Pitt born?",
"Which actor appeared in the least number of movies?",
"What is the average number of movies?",
]
table = pd.DataFrame.from_dict(data)
tokenizer = TapasTokenizer.from_pretrained("lysandre/tapas-temporary-repo", model_max_length=512)
for i in range(12):
# The table cannot even encode the headers, so raise an error
with self.assertRaises(ValueError):
tokenizer.encode(table=table, query=queries[0], max_length=i, truncation="drop_rows_to_fit")
for i in range(12, 512):
new_encoded_inputs = tokenizer.encode(
table=table, query=queries[0], max_length=i, truncation="drop_rows_to_fit"
)
# Ensure that the input IDs are less than the max length defined.
self.assertLessEqual(len(new_encoded_inputs), i)
tokenizer.model_max_length = 20
new_encoded_inputs = tokenizer.encode(table=table, query=queries[0], truncation=True)
dropped_encoded_inputs = tokenizer.encode(table=table, query=queries[0], truncation="drop_rows_to_fit")
# Ensure that the input IDs are still truncated when no max_length is specified
self.assertListEqual(new_encoded_inputs, dropped_encoded_inputs)
self.assertLessEqual(len(new_encoded_inputs), 20)
@slow
def test_min_max_question_length(self):
data = {
"Actors": ["Brad Pitt", "Leonardo Di Caprio", "George Clooney"],
"Age": ["56", "45", "59"],
"Number of movies": ["87", "53", "69"],
"Date of birth": ["18 december 1963", "11 november 1974", "6 may 1961"],
}
queries = "When was Brad Pitt born?"
table = pd.DataFrame.from_dict(data)
# test max_question_length
tokenizer = TapasTokenizer.from_pretrained("lysandre/tapas-temporary-repo", max_question_length=2)
encoding = tokenizer(table=table, queries=queries)
# query should not be tokenized as it's longer than the specified max_question_length
expected_results = [101, 102]
self.assertListEqual(encoding.input_ids[:2], expected_results)
# test min_question_length
tokenizer = TapasTokenizer.from_pretrained("lysandre/tapas-temporary-repo", min_question_length=30)
encoding = tokenizer(table=table, queries=queries)
# query should not be tokenized as it's shorter than the specified min_question_length
expected_results = [101, 102]
self.assertListEqual(encoding.input_ids[:2], expected_results)
@is_pt_tf_cross_test
def test_batch_encode_plus_tensors(self):
tokenizers = self.get_tokenizers(do_lower_case=False)
for tokenizer in tokenizers:
with self.subTest(f"{tokenizer.__class__.__name__}"):
sequences = [
"Testing batch encode plus",
"Testing batch encode plus with different sequence lengths",
"Testing batch encode plus with different sequence lengths correctly pads",
]
table = self.get_table(tokenizer, length=0)
# A tensor cannot be built from sequences which are not the same size
self.assertRaises(ValueError, tokenizer.batch_encode_plus, table, sequences, return_tensors="pt")
self.assertRaises(ValueError, tokenizer.batch_encode_plus, table, sequences, return_tensors="tf")
if tokenizer.pad_token_id is None:
self.assertRaises(
ValueError,
tokenizer.batch_encode_plus,
table,
sequences,
padding=True,
return_tensors="pt",
)
self.assertRaises(
ValueError,
tokenizer.batch_encode_plus,
table,
sequences,
padding="longest",
return_tensors="tf",
)
else:
pytorch_tensor = tokenizer.batch_encode_plus(table, sequences, padding=True, return_tensors="pt")
tensorflow_tensor = tokenizer.batch_encode_plus(
table, sequences, padding="longest", return_tensors="tf"
)
encoded_sequences = tokenizer.batch_encode_plus(table, sequences, padding=True)
for key in encoded_sequences.keys():
pytorch_value = pytorch_tensor[key].tolist()
tensorflow_value = tensorflow_tensor[key].numpy().tolist()
encoded_value = encoded_sequences[key]
self.assertEqual(pytorch_value, tensorflow_value, encoded_value)
@slow
def test_tapas_integration_test(self):
data = {
"Actors": ["Brad Pitt", "Leonardo Di Caprio", "George Clooney"],
"Age": ["56", "45", "59"],
"Number of movies": ["87", "53", "69"],
"Date of birth": ["18 december 1963", "11 november 1974", "6 may 1961"],
}
queries = [
"When was Brad Pitt born?",
"Which actor appeared in the least number of movies?",
"What is the average number of movies?",
]
table = pd.DataFrame.from_dict(data)
tokenizer = TapasTokenizer.from_pretrained("google/tapas-base-finetuned-wtq", model_max_length=512)
# fmt: off
expected_results = {'input_ids':[101,2043,2001,8226,15091,2141,1029,102,5889,2287,2193,1997,5691,3058,1997,4182,8226,15091,5179,6584,2324,2285,3699,14720,4487,6178,9488,3429,5187,2340,2281,3326,2577,18856,7828,3240,5354,6353,1020,2089,3777],'attention_mask':[1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1],'token_type_ids':[[0,0,0,0,0,0,0],[0,0,0,0,0,0,0],[0,0,0,0,0,0,0],[0,0,0,0,0,0,0],[0,0,0,0,0,0,0],[0,0,0,0,0,0,0],[0,0,0,0,0,0,0],[0,0,0,0,0,0,0],[1,1,0,0,0,0,0],[1,2,0,0,0,0,0],[1,3,0,0,0,0,0],[1,3,0,0,0,0,0],[1,3,0,0,0,0,0],[1,4,0,0,0,0,0],[1,4,0,0,0,0,0],[1,4,0,0,0,0,0],[1,1,1,0,0,0,0],[1,1,1,0,0,0,0],[1,2,1,0,2,2,0],[1,3,1,0,3,1,0],[1,4,1,0,2,2,0],[1,4,1,0,2,2,0],[1,4,1,0,2,2,0],[1,1,2,0,0,0,0],[1,1,2,0,0,0,0],[1,1,2,0,0,0,0],[1,1,2,0,0,0,0],[1,2,2,0,1,3,0],[1,3,2,0,1,3,0],[1,4,2,0,3,1,0],[1,4,2,0,3,1,0],[1,4,2,0,3,1,0],[1,1,3,0,0,0,0],[1,1,3,0,0,0,0],[1,1,3,0,0,0,0],[1,1,3,0,0,0,0],[1,2,3,0,3,1,0],[1,3,3,0,2,2,0],[1,4,3,0,1,3,0],[1,4,3,0,1,3,0],[1,4,3,0,1,3,0]]} # noqa: E231
# fmt: on
new_encoded_inputs = tokenizer.encode_plus(table=table, query=queries[0])
self.assertDictEqual(dict(new_encoded_inputs), expected_results)
@slow
def test_full_tokenizer(self):
data = [
["Pos", "No", "Driver", "Team", "Laps", "Time/Retired", "Grid", "Points"],
["1", "32", "Patrick Carpentier", "Team Player's", "87", "1:48:11.023", "1", "22"],
["2", "1", "Bruno Junqueira", "Newman/Haas Racing", "87", "+0.8 secs", "2", "17"],
["3", "3", "Paul Tracy", "Team Player's", "87", "+28.6 secs", "3", "14"],
["4", "9", "Michel Jourdain, Jr.", "Team Rahal", "87", "+40.8 secs", "13", "12"],
["5", "34", "Mario Haberfeld", "Mi-Jack Conquest Racing", "87", "+42.1 secs", "6", "10"],
["6", "20", "Oriol Servia", "Patrick Racing", "87", "+1:00.2", "10", "8"],
["7", "51", "Adrian Fernandez", "Fernandez Racing", "87", "+1:01.4", "5", "6"],
["8", "12", "Jimmy Vasser", "American Spirit Team Johansson", "87", "+1:01.8", "8", "5"],
["9", "7", "Tiago Monteiro", "Fittipaldi-Dingman Racing", "86", "+ 1 Lap", "15", "4"],
["10", "55", "Mario Dominguez", "Herdez Competition", "86", "+ 1 Lap", "11", "3"],
["11", "27", "Bryan Herta", "PK Racing", "86", "+ 1 Lap", "12", "2"],
["12", "31", "Ryan Hunter-Reay", "American Spirit Team Johansson", "86", "+ 1 Lap", "17", "1"],
["13", "19", "Joel Camathias", "Dale Coyne Racing", "85", "+ 2 Laps", "18", "0"],
["14", "33", "Alex Tagliani", "Rocketsports Racing", "85", "+ 2 Laps", "14", "0"],
["15", "4", "Roberto Moreno", "Herdez Competition", "85", "+ 2 Laps", "9", "0"],
["16", "11", "Geoff Boss", "Dale Coyne Racing", "83", "Mechanical", "19", "0"],
["17", "2", "Sebastien Bourdais", "Newman/Haas Racing", "77", "Mechanical", "4", "0"],
["18", "15", "Darren Manning", "Walker Racing", "12", "Mechanical", "7", "0"],
["19", "5", "Rodolfo Lavin", "Walker Racing", "10", "Mechanical", "16", "0"],
]
query = "what were the drivers names?"
table = pd.DataFrame.from_records(data[1:], columns=data[0])
tokenizer = TapasTokenizer.from_pretrained("google/tapas-base-finetuned-wtq", model_max_length=512)
model_inputs = tokenizer(table, query, padding="max_length")
input_ids = model_inputs["input_ids"]
token_type_ids = np.array(model_inputs["token_type_ids"])
segment_ids = token_type_ids[:, 0]
column_ids = token_type_ids[:, 1]
row_ids = token_type_ids[:, 2]
# fmt: off
expected_results = {'input_ids':[101,2054,2020,1996,6853,3415,1029,102,13433,2015,2053,4062,2136,10876,2051,1013,3394,8370,2685,1015,3590,4754,29267,4765,3771,2136,2447,1005,1055,6584,1015,1024,4466,1024,2340,1012,6185,2509,1015,2570,1016,1015,10391,12022,4226,7895,10625,1013,22996,3868,6584,1009,1014,1012,1022,10819,2015,1016,2459,1017,1017,2703,10555,2136,2447,1005,1055,6584,1009,2654,1012,1020,10819,2015,1017,2403,1018,1023,8709,8183,3126,21351,2078,1010,3781,1012,2136,10958,8865,6584,1009,2871,1012,1022,10819,2015,2410,2260,1019,4090,7986,5292,5677,8151,2771,1011,2990,9187,3868,6584,1009,4413,1012,1015,10819,2015,1020,2184,1020,2322,2030,20282,14262,9035,4754,3868,6584,1009,1015,1024,4002,1012,1016,2184,1022,1021,4868,7918,12023,12023,3868,6584,1009,1015,1024,5890,1012,1018,1019,1020,1022,2260,5261,12436,18116,2137,4382,2136,26447,6584,1009,1015,1024,5890,1012,1022,1022,1019,1023,1021,27339,3995,10125,9711,4906,25101,24657,1011,22033,2386,3868,6564,1009,1015,5001,2321,1018,2184,4583,7986,14383,2075,29488,14906,9351,2971,6564,1009,1015,5001,2340,1017,2340,2676,8527,2014,2696,1052,2243,3868,6564,1009,1015,5001,2260,1016,2260,2861,4575,4477,1011,2128,4710,2137,4382,2136,26447,6564,1009,1015,5001,2459,1015,2410,2539,8963,11503,25457,3022,8512,2522,9654,3868,5594,1009,1016,10876,2324,1014,2403,3943,4074,6415,15204,2072,12496,25378,3868,5594,1009,1016,10876,2403,1014,2321,1018,10704,17921,14906,9351,2971,5594,1009,1016,10876,1023,1014,2385,2340,14915,5795,8512,2522,9654,3868,6640,6228,2539,1014,2459,1016,28328,8945,3126,21351,2015,10625,1013,22996,3868,6255,6228,1018,1014,2324,2321,12270,11956,5232,3868,2260,6228,1021,1014,2539,1019,8473,28027,2080,2474,6371,5232,3868,2184,6228,2385,1014,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],'column_ids':[0,0,0,0,0,0,0,0,1,1,2,3,4,5,6,6,6,7,8,1,2,3,3,3,3,4,4,4,4,5,6,6,6,6,6,6,6,6,7,8,1,2,3,3,3,3,4,4,4,4,5,6,6,6,6,6,6,7,8,1,2,3,3,4,4,4,4,5,6,6,6,6,6,6,7,8,1,2,3,3,3,3,3,3,3,3,4,4,4,5,6,6,6,6,6,6,7,8,1,2,3,3,3,3,4,4,4,4,4,5,6,6,6,6,6,6,7,8,1,2,3,3,3,3,4,4,5,6,6,6,6,6,6,7,8,1,2,3,3,4,4,5,6,6,6,6,6,6,7,8,1,2,3,3,3,4,4,4,4,5,6,6,6,6,6,6,7,8,1,2,3,3,3,3,4,4,4,4,4,4,4,5,6,6,6,7,8,1,2,3,3,3,3,4,4,4,5,6,6,6,7,8,1,2,3,3,3,4,4,4,5,6,6,6,7,8,1,2,3,3,3,3,3,4,4,4,4,5,6,6,6,7,8,1,2,3,3,3,3,4,4,4,4,5,6,6,6,7,8,1,2,3,3,3,3,4,4,4,5,6,6,6,7,8,1,2,3,3,4,4,4,5,6,6,6,7,8,1,2,3,3,4,4,4,4,5,6,7,8,1,2,3,3,3,3,3,4,4,4,4,5,6,7,8,1,2,3,3,4,4,5,6,7,8,1,2,3,3,3,3,3,4,4,5,6,7,8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],'row_ids':[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,
11,11,11,11,11,11,11,11,11,11,11,11,11,11,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,15,15,15,15,15,15,15,15,15,15,15,15,15,16,16,16,16,16,16,16,16,16,16,16,16,17,17,17,17,17,17,17,17,17,17,17,17,17,17,17,18,18,18,18,18,18,18,18,18,18,19,19,19,19,19,19,19,19,19,19,19,19,19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],'segment_ids':[0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]} # noqa: E231
# fmt: on
self.assertListEqual(input_ids, expected_results["input_ids"])
self.assertListEqual(segment_ids.tolist(), expected_results["segment_ids"])
self.assertListEqual(column_ids.tolist(), expected_results["column_ids"])
self.assertListEqual(row_ids.tolist(), expected_results["row_ids"])
@unittest.skip("Skip this test while all models are still to be uploaded.")
def test_pretrained_model_lists(self):
pass
@unittest.skip("Doesn't support another framework than PyTorch")
def test_np_encode_plus_sent_to_model(self):
pass
| _punctuation(self): |
manage.py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "geekshop.settings")
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == "__main__":
| main() |
|
create-api-client.dto.js | "use strict";
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
return c > 3 && r && Object.defineProperty(target, key, r), r;
};
var __metadata = (this && this.__metadata) || function (k, v) {
if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
};
Object.defineProperty(exports, "__esModule", { value: true });
const class_validator_1 = require("class-validator");
class | {
}
__decorate([
class_validator_1.IsNumber(),
__metadata("design:type", Number)
], CreateApiClientDto.prototype, "value", void 0);
__decorate([
class_validator_1.IsString(),
__metadata("design:type", String)
], CreateApiClientDto.prototype, "text_value", void 0);
__decorate([
class_validator_1.IsString(),
__metadata("design:type", String)
], CreateApiClientDto.prototype, "name", void 0);
exports.CreateApiClientDto = CreateApiClientDto;
//# sourceMappingURL=create-api-client.dto.js.map | CreateApiClientDto |
stanfordnlp_processor.py | # Copyright 2019 The Forte Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from typing import List, Any, Dict
import stanza
from forte.common.configuration import Config
from forte.common.resources import Resources
from forte.data.data_pack import DataPack
from forte.processors.base import PackProcessor
from ft.onto.base_ontology import Token, Sentence, Dependency
__all__ = [
"StandfordNLPProcessor",
]
class StandfordNLPProcessor(PackProcessor):
def __init__(self):
super().__init__()
self.nlp = None
self.processors = set()
def | (self):
stanza.download(self.configs.lang, self.configs.dir)
self.processors = set(self.configs.processors.split(','))
# pylint: disable=unused-argument
def initialize(self, resources: Resources, configs: Config):
super().initialize(resources, configs)
self.set_up()
self.nlp = stanza.Pipeline(
lang=self.configs.lang,
dir=self.configs.dir,
use_gpu=self.configs.use_gpu,
processors=self.configs.processors,
)
@classmethod
def default_configs(cls) -> Dict[str, Any]:
"""
Defines a basic config structure for StanfordNLP.
:return: a dictionary with the default configuration values
"""
config = super().default_configs()
config.update(
{
'processors': 'tokenize,pos,lemma,depparse',
'lang': 'en',
# Language code for the language to build the Pipeline
'use_gpu': False,
'dir': '.',
})
return config
def _process(self, input_pack: DataPack):
doc = input_pack.text
if len(doc) == 0:
logging.warning("Find empty text in doc.")
# sentence parsing
sentences = self.nlp(doc).sentences
# Iterating through stanfordnlp sentence objects
for sentence in sentences:
Sentence(input_pack, sentence.tokens[0].start_char,
sentence.tokens[-1].end_char)
tokens: List[Token] = []
if "tokenize" in self.processors:
# Iterating through stanfordnlp word objects
for word in sentence.words:
misc = word.misc.split('|')
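# stanza packs character offsets into `misc` as key=value pairs separated
# by '|', e.g. "start_char=0|end_char=4"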
t_start = -1
t_end = -1
for m in misc:
k, v = m.split('=')
if k == 'start_char':
t_start = int(v)
elif k == 'end_char':
t_end = int(v)
if t_start < 0 or t_end < 0:
raise ValueError(
"Cannot determine word start or end for "
"stanfordnlp.")
token = Token(input_pack, t_start, t_end)
if "pos" in self.processors:
token.pos = word.pos
token.ud_xpos = word.xpos
if "lemma" in self.processors:
token.lemma = word.lemma
tokens.append(token)
# For each sentence, get the dependency relations among tokens
if "depparse" in self.processors:
# Iterating through token entries in current sentence
for token, word in zip(tokens, sentence.words):
child = token # current token
parent = tokens[word.head - 1] # Head token
relation_entry = Dependency(input_pack, parent, child)
relation_entry.rel_type = word.deprel
| set_up |
ingress_config.go | package mesh
import (
keptnv2 "github.com/keptn/go-utils/pkg/lib/v0_2_0"
"os"
"strings"
)
// GetIngressHostnameSuffix returns the ingress hostname suffix
func GetIngressHostnameSuffix() string {
if os.Getenv("INGRESS_HOSTNAME_SUFFIX") != "" {
return os.Getenv("INGRESS_HOSTNAME_SUFFIX")
}
return "svc.cluster.local"
}
// GetIngressProtocol returns the ingress protocol
func GetIngressProtocol() string {
if os.Getenv("INGRESS_PROTOCOL") != "" |
return "http"
}
// GetIngressPort returns the ingress port
func GetIngressPort() string {
if os.Getenv("INGRESS_PORT") != "" {
return os.Getenv("INGRESS_PORT")
}
return "80"
}
// GetIngressGateway returns the ingress gateway
func GetIngressGateway() string {
if os.Getenv("ISTIO_GATEWAY") != "" {
return os.Getenv("ISTIO_GATEWAY")
}
return "public-gateway.istio-system"
}
// GetLocalDeploymentURI returns URIs where a service is accessible from within the cluster
func GetLocalDeploymentURI(event keptnv2.EventData, port string) []string {
return []string{"http://" + event.Service + "." + event.Project + "-" + event.Stage + ":" + port}
}
// GetPublicDeploymentURI returns URIs where a service is exposed
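// Illustrative example (event values assumed): with the default template and an
// event for service "carts" in project "sockshop", stage "dev", this resolves to
// "http://carts.sockshop-dev.svc.cluster.local:80".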
func GetPublicDeploymentURI(event keptnv2.EventData) []string {
publicHostName := GetPublicDeploymentHostNameTemplate()
publicHostName = strings.ReplaceAll(publicHostName, "${INGRESS_PROTOCOL}", GetIngressProtocol())
publicHostName = strings.ReplaceAll(publicHostName, "${SERVICE}", event.Service)
publicHostName = strings.ReplaceAll(publicHostName, "${PROJECT}", event.Project)
publicHostName = strings.ReplaceAll(publicHostName, "${STAGE}", event.Stage)
publicHostName = strings.ReplaceAll(publicHostName, "${INGRESS_HOSTNAME_SUFFIX}", GetIngressHostnameSuffix())
publicHostName = strings.ReplaceAll(publicHostName, "${INGRESS_PORT}", GetIngressPort())
return []string{publicHostName}
}
// GetPublicDeploymentHostNameTemplate returns the hostname template used to build
// public deployment URIs; a custom HOSTNAME_TEMPLATE is upper-cased so that its
// ${...} placeholders match the keys substituted in GetPublicDeploymentURI.
func GetPublicDeploymentHostNameTemplate() string {
hostNameTemplate := os.Getenv("HOSTNAME_TEMPLATE")
if hostNameTemplate == "" {
return "${INGRESS_PROTOCOL}://${SERVICE}.${PROJECT}-${STAGE}.${INGRESS_HOSTNAME_SUFFIX}:${INGRESS_PORT}"
}
return strings.ToUpper(hostNameTemplate)
}
| {
return strings.ToLower(os.Getenv("INGRESS_PROTOCOL"))
} |
blackhole.go | package src
import (
pluginDriver "github.com/brokercap/Bifrost/plugin/driver"
)
const VERSION = "v1.6.0"
const BIFROST_VERION = "v1.6.0"
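// The blackhole driver is a no-op sink: init() registers it with the plugin
// framework at load time, and any data handed to it is simply discarded.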
func init(){
pluginDriver.Register("blackhole",NewConn,VERSION,BIFROST_VERION)
}
func NewConn() pluginDriver.Driver |
type Conn struct {
pluginDriver.PluginDriverInterface
}
| {
return &Conn{}
} |
beaumont_isd.py | from . import base
from . import mixins
from datetime import date
class TransformedRecord(
mixins.GenericCompensationMixin,
mixins.GenericDepartmentMixin, mixins.GenericIdentifierMixin,
mixins.GenericJobTitleMixin, mixins.GenericPersonMixin,
mixins.MembershipMixin, mixins.OrganizationMixin, mixins.PostMixin,
mixins.RaceMixin, mixins.LinkMixin, base.BaseTransformedRecord):
MAP = {
'last_name': 'PER_LAST_NAME',
'first_name': 'PER_FIRST_NAME',
'middle_name': 'PER_MIDDLE_NAME',
'department': 'Organization',
'job_title': 'ROLE_NAME',
'hire_date': 'EMP_HIRE_DT',
'compensation': 'EMP_ASGN_PAY_HIST_A_NRML_PAY',
'gender': 'PER_GENDER',
'nationality': 'PRIMARY_ETHNICITY_CODE',
'employee_type': 'Status'
}
NAME_FIELDS = ('first_name', 'last_name', )
ORGANIZATION_NAME = 'Beaumont ISD'
ORGANIZATION_CLASSIFICATION = 'School District'
DATE_PROVIDED = date(2018, 6, 14)
# The URL to find the raw data in our S3 bucket.
URL = ('https://s3.amazonaws.com/raw.texastribune.org/beaumont_isd/'
'salaries/2018-06/foia.xlsx')
race_map = {
'AFRICAN AM': 'African American',
'WHITE': 'White',
'HISPANIC': 'Hispanic',
'ASIAN': 'Asian',
'AMER IND': 'American Indian'
}
# This is how the loader checks for valid people. Defaults to checking to see if `last_name` is empty.
@property
def is_valid(self):
# Adjust to return False on invalid fields. For example:
return self.last_name.strip() != ''
@property
def compensation_type(self):
employee_type = self.employee_type
if employee_type == 'Part-Time':
return 'PT'
if employee_type == '':
return 'FT'
return 'FT'
@property
def description(self):
employee_type = self.employee_type
if employee_type == '':
return "Yearly salary"
if employee_type == 'Part-Time':
return "Part-time, hourly rate"
return "Yearly salary"
@property
def hire_date(self):
raw_date = self.get_mapped_value('hire_date')[:10]
return raw_date
@property
def race(self):
return {
'name': self.race_map[self.nationality.strip()]
}
@property
def person(self):
name = self.get_name()
r = {
'family_name': name.last, | 'given_name': name.first,
'additional_name': name.middle,
'name': unicode(name),
'gender': self.gender,
}
return r
transform = base.transform_factory(TransformedRecord) | |
mod.rs | //! Utilities for formatting and printing strings.
// ignore-tidy-undocumented-unsafe
#![stable(feature = "rust1", since = "1.0.0")]
use crate::cell::{Cell, Ref, RefCell, RefMut, UnsafeCell};
use crate::marker::PhantomData;
use crate::mem;
use crate::num::flt2dec;
use crate::ops::Deref;
use crate::result;
use crate::slice;
use crate::str;
mod builders;
mod float;
mod num;
#[stable(feature = "fmt_flags_align", since = "1.28.0")]
/// Possible alignments returned by `Formatter::align`
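///
/// An illustrative example (the `Tag` type below is not part of the library):
/// a custom `Display` implementation can inspect the requested alignment via
/// `Formatter::align`:
///
/// ```
/// use std::fmt::{self, Alignment, Display};
///
/// struct Tag;
///
/// impl Display for Tag {
///     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
///         match f.align() {
///             Some(Alignment::Left) => f.write_str("left"),
///             Some(Alignment::Right) => f.write_str("right"),
///             Some(Alignment::Center) => f.write_str("center"),
///             None => f.write_str("unspecified"),
///         }
///     }
/// }
///
/// // This impl ignores width and fill, so only the alignment is reported.
/// assert_eq!(format!("{:<8}", Tag), "left");
/// assert_eq!(format!("{:^8}", Tag), "center");
/// assert_eq!(format!("{}", Tag), "unspecified");
/// ```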
#[derive(Debug)]
pub enum Alignment {
#[stable(feature = "fmt_flags_align", since = "1.28.0")]
/// Indication that contents should be left-aligned.
Left,
#[stable(feature = "fmt_flags_align", since = "1.28.0")]
/// Indication that contents should be right-aligned.
Right,
#[stable(feature = "fmt_flags_align", since = "1.28.0")]
/// Indication that contents should be center-aligned.
Center,
}
#[stable(feature = "debug_builders", since = "1.2.0")]
pub use self::builders::{DebugList, DebugMap, DebugSet, DebugStruct, DebugTuple};
#[unstable(feature = "fmt_internals", reason = "internal to format_args!", issue = "0")]
#[doc(hidden)]
pub mod rt {
pub mod v1;
}
/// The type returned by formatter methods.
///
/// # Examples
///
/// ```
/// use std::fmt;
///
/// #[derive(Debug)]
/// struct Triangle {
/// a: f32,
/// b: f32,
/// c: f32
/// }
///
/// impl fmt::Display for Triangle {
/// fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
/// write!(f, "({}, {}, {})", self.a, self.b, self.c)
/// }
/// }
///
/// let pythagorean_triple = Triangle { a: 3.0, b: 4.0, c: 5.0 };
///
/// assert_eq!(format!("{}", pythagorean_triple), "(3, 4, 5)");
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub type Result = result::Result<(), Error>;
/// The error type which is returned from formatting a message into a stream.
///
/// This type does not support transmission of an error other than that an error
/// occurred. Any extra information must be arranged to be transmitted through
/// some other means.
///
/// An important thing to remember is that the type `fmt::Error` should not be
/// confused with [`std::io::Error`] or [`std::error::Error`], which you may also
/// have in scope.
///
/// [`std::io::Error`]: ../../std/io/struct.Error.html
/// [`std::error::Error`]: ../../std/error/trait.Error.html
///
/// # Examples
///
/// ```rust
/// use std::fmt::{self, write};
///
/// let mut output = String::new();
/// if let Err(fmt::Error) = write(&mut output, format_args!("Hello {}!", "world")) {
/// panic!("An error occurred");
/// }
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[derive(Copy, Clone, Debug, Default, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct Error;
/// A collection of methods that are required to format a message into a stream.
///
/// This trait is the type which this module requires when formatting
/// information. This is similar to the standard library's [`io::Write`] trait,
/// but it is only intended for use in libcore.
///
/// This trait should generally not be implemented by consumers of the standard
/// library. The [`write!`] macro accepts an instance of [`io::Write`], and the
/// [`io::Write`] trait is favored over implementing this trait.
///
/// [`write!`]: ../../std/macro.write.html
/// [`io::Write`]: ../../std/io/trait.Write.html
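///
/// # Examples
///
/// A minimal sketch of an implementor (the `CountingWriter` type below is
/// illustrative, not part of the library):
///
/// ```
/// use std::fmt::{self, Write};
///
/// // Counts the bytes written through it and discards the data itself.
/// struct CountingWriter {
///     bytes: usize,
/// }
///
/// impl Write for CountingWriter {
///     fn write_str(&mut self, s: &str) -> fmt::Result {
///         self.bytes += s.len();
///         Ok(())
///     }
/// }
///
/// let mut w = CountingWriter { bytes: 0 };
/// write!(w, "{}-{}", 1, 23).unwrap();
/// assert_eq!(w.bytes, 4); // "1-23" is four bytes long
/// ```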
#[stable(feature = "rust1", since = "1.0.0")]
pub trait Write {
/// Writes a string slice into this writer, returning whether the write
/// succeeded.
///
/// This method can only succeed if the entire string slice was successfully
/// written, and this method will not return until all data has been
/// written or an error occurs.
///
/// # Errors
///
/// This function will return an instance of [`Error`] on error.
///
/// [`Error`]: struct.Error.html
///
/// # Examples
///
/// ```
/// use std::fmt::{Error, Write};
///
/// fn writer<W: Write>(f: &mut W, s: &str) -> Result<(), Error> {
/// f.write_str(s)
/// }
///
/// let mut buf = String::new();
/// writer(&mut buf, "hola").unwrap();
/// assert_eq!(&buf, "hola");
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn write_str(&mut self, s: &str) -> Result;
/// Writes a [`char`] into this writer, returning whether the write succeeded.
///
/// A single [`char`] may be encoded as more than one byte.
/// This method can only succeed if the entire byte sequence was successfully
/// written, and this method will not return until all data has been
/// written or an error occurs.
///
/// # Errors
///
/// This function will return an instance of [`Error`] on error.
///
/// [`char`]: ../../std/primitive.char.html
/// [`Error`]: struct.Error.html
///
/// # Examples
///
/// ```
/// use std::fmt::{Error, Write};
///
/// fn writer<W: Write>(f: &mut W, c: char) -> Result<(), Error> {
/// f.write_char(c)
/// }
///
/// let mut buf = String::new();
/// writer(&mut buf, 'a').unwrap();
/// writer(&mut buf, 'b').unwrap();
/// assert_eq!(&buf, "ab");
/// ```
#[stable(feature = "fmt_write_char", since = "1.1.0")]
fn write_char(&mut self, c: char) -> Result {
self.write_str(c.encode_utf8(&mut [0; 4]))
}
/// Glue for usage of the [`write!`] macro with implementors of this trait.
///
/// This method should generally not be invoked manually, but rather through
/// the [`write!`] macro itself.
///
/// [`write!`]: ../../std/macro.write.html
///
/// # Examples
///
/// ```
/// use std::fmt::{Error, Write};
///
/// fn writer<W: Write>(f: &mut W, s: &str) -> Result<(), Error> {
/// f.write_fmt(format_args!("{}", s))
/// }
///
/// let mut buf = String::new();
/// writer(&mut buf, "world").unwrap();
/// assert_eq!(&buf, "world");
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn write_fmt(mut self: &mut Self, args: Arguments<'_>) -> Result {
write(&mut self, args)
}
}
#[stable(feature = "fmt_write_blanket_impl", since = "1.4.0")]
impl<W: Write + ?Sized> Write for &mut W {
fn write_str(&mut self, s: &str) -> Result {
(**self).write_str(s)
}
fn write_char(&mut self, c: char) -> Result {
(**self).write_char(c)
}
fn write_fmt(&mut self, args: Arguments<'_>) -> Result {
(**self).write_fmt(args)
}
}
/// Configuration for formatting.
///
/// A `Formatter` represents various options related to formatting. Users do not
/// construct `Formatter`s directly; a mutable reference to one is passed to
/// the `fmt` method of all formatting traits, like [`Debug`] and [`Display`].
///
/// To interact with a `Formatter`, you'll call various methods to change the
/// various options related to formatting. For examples, please see the
/// documentation of the methods defined on `Formatter` below.
///
/// [`Debug`]: trait.Debug.html
/// [`Display`]: trait.Display.html
#[allow(missing_debug_implementations)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Formatter<'a> {
flags: u32,
fill: char,
align: rt::v1::Alignment,
width: Option<usize>,
precision: Option<usize>,
buf: &'a mut (dyn Write + 'a),
curarg: slice::Iter<'a, ArgumentV1<'a>>,
args: &'a [ArgumentV1<'a>],
}
// NB. Argument is essentially an optimized partially applied formatting function,
// equivalent to `exists T. (&T, fn(&T, &mut Formatter<'_>) -> Result)`.
struct Void {
_priv: (),
/// Erases all oibits, because `Void` erases the type of the object that
/// will be used to produce formatted output. Since we do not know what
/// oibits the real types have (and they can have any or none), we need to
/// take the most conservative approach and forbid all oibits.
///
/// It was added after #45197 showed that one could share a `!Sync`
/// object across threads by passing it into `format_args!`.
_oibit_remover: PhantomData<*mut dyn Fn()>,
}
/// This struct represents the generic "argument" which is taken by the Xprintf
/// family of functions. It contains a function to format the given value. At
/// compile time it is ensured that the function and the value have the correct
/// types, and then this struct is used to canonicalize arguments to one type.
#[derive(Copy, Clone)]
#[allow(missing_debug_implementations)]
#[unstable(feature = "fmt_internals", reason = "internal to format_args!", issue = "0")]
#[doc(hidden)]
pub struct ArgumentV1<'a> {
value: &'a Void,
formatter: fn(&Void, &mut Formatter<'_>) -> Result,
}
impl<'a> ArgumentV1<'a> {
#[inline(never)]
fn show_usize(x: &usize, f: &mut Formatter<'_>) -> Result {
Display::fmt(x, f)
}
#[doc(hidden)]
#[unstable(feature = "fmt_internals", reason = "internal to format_args!", issue = "0")]
pub fn new<'b, T>(x: &'b T, f: fn(&T, &mut Formatter<'_>) -> Result) -> ArgumentV1<'b> {
unsafe { ArgumentV1 { formatter: mem::transmute(f), value: mem::transmute(x) } }
}
#[doc(hidden)]
#[unstable(feature = "fmt_internals", reason = "internal to format_args!", issue = "0")]
pub fn from_usize(x: &usize) -> ArgumentV1<'_> {
ArgumentV1::new(x, ArgumentV1::show_usize)
}
fn as_usize(&self) -> Option<usize> {
if self.formatter as usize == ArgumentV1::show_usize as usize {
Some(unsafe { *(self.value as *const _ as *const usize) })
} else {
None
}
}
}
// flags available in the v1 format of format_args
#[derive(Copy, Clone)]
enum FlagV1 {
SignPlus,
SignMinus,
Alternate,
SignAwareZeroPad,
DebugLowerHex,
DebugUpperHex,
}
impl<'a> Arguments<'a> {
/// When using the format_args!() macro, this function is used to generate the
/// Arguments structure.
#[doc(hidden)]
#[inline]
#[unstable(feature = "fmt_internals", reason = "internal to format_args!", issue = "0")]
pub fn new_v1(pieces: &'a [&'a str], args: &'a [ArgumentV1<'a>]) -> Arguments<'a> {
Arguments { pieces, fmt: None, args }
}
/// This function is used to specify nonstandard formatting parameters.
/// The `pieces` array must be at least as long as `fmt` to construct
/// a valid Arguments structure. Also, any `Count` within `fmt` that is
/// `CountIsParam` or `CountIsNextParam` has to point to an argument
/// created with `ArgumentV1::from_usize`. However, failing to do so doesn't cause
/// unsafety, but the invalid count parameters will simply be ignored.
#[doc(hidden)]
#[inline]
#[unstable(feature = "fmt_internals", reason = "internal to format_args!", issue = "0")]
pub fn new_v1_formatted(
pieces: &'a [&'a str],
args: &'a [ArgumentV1<'a>],
fmt: &'a [rt::v1::Argument],
) -> Arguments<'a> {
Arguments { pieces, fmt: Some(fmt), args }
}
/// Estimates the length of the formatted text.
///
/// This is intended to be used for setting initial `String` capacity
/// when using `format!`. Note: this is neither the lower nor upper bound.
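///
/// As a worked example (not from the original docs): for
/// `format_args!("Hello, {}!", name)` the string pieces are `"Hello, "` and
/// `"!"`, with total length 8 and one interpolated argument, so this method
/// returns 16, the pre-doubled piece length.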
#[doc(hidden)]
#[inline]
#[unstable(feature = "fmt_internals", reason = "internal to format_args!", issue = "0")]
pub fn estimated_capacity(&self) -> usize {
let pieces_length: usize = self.pieces.iter().map(|x| x.len()).sum();
if self.args.is_empty() {
pieces_length
} else if self.pieces[0] == "" && pieces_length < 16 {
// If the format string starts with an argument,
// don't preallocate anything, unless length
// of pieces is significant.
0
} else {
// There are some arguments, so any additional push
// will reallocate the string. To avoid that,
// we're "pre-doubling" the capacity here.
pieces_length.checked_mul(2).unwrap_or(0)
}
}
}
/// This structure represents a safely precompiled version of a format string
/// and its arguments. This cannot be generated at runtime because it cannot
/// safely be done, so no constructors are given and the fields are private
/// to prevent modification.
///
/// The [`format_args!`] macro will safely create an instance of this structure.
/// The macro validates the format string at compile-time so usage of the
/// [`write`] and [`format`] functions can be safely performed.
///
/// You can use the `Arguments<'a>` that [`format_args!`] returns in `Debug`
/// and `Display` contexts as seen below. The example also shows that `Debug`
/// and `Display` format to the same thing: the interpolated format string
/// in `format_args!`.
///
/// ```rust
/// let debug = format!("{:?}", format_args!("{} foo {:?}", 1, 2));
/// let display = format!("{}", format_args!("{} foo {:?}", 1, 2));
/// assert_eq!("1 foo 2", display);
/// assert_eq!(display, debug);
/// ```
///
/// [`format_args!`]: ../../std/macro.format_args.html
/// [`format`]: ../../std/fmt/fn.format.html
/// [`write`]: ../../std/fmt/fn.write.html
#[stable(feature = "rust1", since = "1.0.0")]
#[derive(Copy, Clone)]
pub struct Arguments<'a> {
// Format string pieces to print.
pieces: &'a [&'a str],
// Placeholder specs, or `None` if all specs are default (as in "{}{}").
fmt: Option<&'a [rt::v1::Argument]>,
// Dynamic arguments for interpolation, to be interleaved with string
// pieces. (Every argument is preceded by a string piece.)
args: &'a [ArgumentV1<'a>],
}
#[stable(feature = "rust1", since = "1.0.0")]
impl Debug for Arguments<'_> {
fn fmt(&self, fmt: &mut Formatter<'_>) -> Result {
Display::fmt(self, fmt)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl Display for Arguments<'_> {
fn fmt(&self, fmt: &mut Formatter<'_>) -> Result {
write(fmt.buf, *self)
}
}
/// `?` formatting.
///
/// `Debug` should format the output in a programmer-facing, debugging context.
///
/// Generally speaking, you should just `derive` a `Debug` implementation.
///
/// When used with the alternate format specifier `#?`, the output is pretty-printed.
///
/// For more information on formatters, see [the module-level documentation][module].
///
/// [module]: ../../std/fmt/index.html
///
/// This trait can be used with `#[derive]` if all fields implement `Debug`. When
/// `derive`d for structs, it will use the name of the `struct`, then `{`, then a
/// comma-separated list of each field's name and `Debug` value, then `}`. For
/// `enum`s, it will use the name of the variant and, if applicable, `(`, then the
/// `Debug` values of the fields, then `)`.
///
/// # Examples
///
/// Deriving an implementation:
///
/// ```
/// #[derive(Debug)]
/// struct Point {
/// x: i32,
/// y: i32,
/// }
///
/// let origin = Point { x: 0, y: 0 };
///
/// assert_eq!(format!("The origin is: {:?}", origin), "The origin is: Point { x: 0, y: 0 }");
/// ```
///
/// Manually implementing:
///
/// ```
/// use std::fmt;
///
/// struct Point {
/// x: i32,
/// y: i32,
/// }
///
/// impl fmt::Debug for Point {
/// fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
/// f.debug_struct("Point")
/// .field("x", &self.x)
/// .field("y", &self.y)
/// .finish()
/// }
/// }
///
/// let origin = Point { x: 0, y: 0 };
///
/// assert_eq!(format!("The origin is: {:?}", origin), "The origin is: Point { x: 0, y: 0 }");
/// ```
///
/// There are a number of helper methods on the [`Formatter`] struct to help you with manual
/// implementations, such as [`debug_struct`].
///
/// `Debug` implementations using either `derive` or the debug builder API
/// on [`Formatter`] support pretty-printing using the alternate flag: `{:#?}`.
///
/// [`debug_struct`]: ../../std/fmt/struct.Formatter.html#method.debug_struct
/// [`Formatter`]: ../../std/fmt/struct.Formatter.html
///
/// Pretty-printing with `#?`:
///
/// ```
/// #[derive(Debug)]
/// struct Point {
/// x: i32,
/// y: i32,
/// }
///
/// let origin = Point { x: 0, y: 0 };
///
/// assert_eq!(format!("The origin is: {:#?}", origin),
/// "The origin is: Point {
/// x: 0,
/// y: 0,
/// }");
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_on_unimplemented(
on(
crate_local,
label = "`{Self}` cannot be formatted using `{{:?}}`",
note = "add `#[derive(Debug)]` or manually implement `{Debug}`"
),
message = "`{Self}` doesn't implement `{Debug}`",
label = "`{Self}` cannot be formatted using `{{:?}}` because it doesn't implement `{Debug}`"
)]
#[doc(alias = "{:?}")]
#[rustc_diagnostic_item = "debug_trait"]
pub trait Debug {
/// Formats the value using the given formatter.
///
/// # Examples
///
/// ```
/// use std::fmt;
///
/// struct Position {
/// longitude: f32,
/// latitude: f32,
/// }
///
/// impl fmt::Debug for Position {
/// fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
/// f.debug_tuple("")
/// .field(&self.longitude)
/// .field(&self.latitude)
/// .finish()
/// }
/// }
///
/// let position = Position { longitude: 1.987, latitude: 2.983 };
/// assert_eq!(format!("{:?}", position), "(1.987, 2.983)");
///
/// assert_eq!(format!("{:#?}", position), "(
/// 1.987,
/// 2.983,
/// )");
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn fmt(&self, f: &mut Formatter<'_>) -> Result;
}
// Separate module to reexport the macro `Debug` from prelude without the trait `Debug`.
pub(crate) mod macros {
/// Derive macro generating an impl of the trait `Debug`.
#[rustc_builtin_macro]
#[stable(feature = "builtin_macro_prelude", since = "1.38.0")]
#[allow_internal_unstable(core_intrinsics)]
pub macro Debug($item:item) {
/* compiler built-in */
}
}
#[stable(feature = "builtin_macro_prelude", since = "1.38.0")]
#[doc(inline)]
pub use macros::Debug;
/// Format trait for an empty format, `{}`.
///
/// `Display` is similar to [`Debug`][debug], but `Display` is for user-facing
/// output, and so cannot be derived.
///
/// [debug]: trait.Debug.html
///
/// For more information on formatters, see [the module-level documentation][module].
///
/// [module]: ../../std/fmt/index.html
///
/// # Examples
///
/// Implementing `Display` on a type:
///
/// ```
/// use std::fmt;
///
/// struct Point {
/// x: i32,
/// y: i32,
/// }
///
/// impl fmt::Display for Point {
/// fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
/// write!(f, "({}, {})", self.x, self.y)
/// }
/// }
///
/// let origin = Point { x: 0, y: 0 };
///
/// assert_eq!(format!("The origin is: {}", origin), "The origin is: (0, 0)");
/// ```
#[rustc_on_unimplemented(
on(
_Self = "std::path::Path",
label = "`{Self}` cannot be formatted with the default formatter; call `.display()` on it",
note = "call `.display()` or `.to_string_lossy()` to safely print paths, \
as they may contain non-Unicode data"
),
message = "`{Self}` doesn't implement `{Display}`",
label = "`{Self}` cannot be formatted with the default formatter",
note = "in format strings you may be able to use `{{:?}}` (or {{:#?}} for pretty-print) instead"
)]
#[doc(alias = "{}")]
#[stable(feature = "rust1", since = "1.0.0")]
pub trait Display {
/// Formats the value using the given formatter.
///
/// # Examples
///
/// ```
/// use std::fmt;
///
/// struct Position {
/// longitude: f32,
/// latitude: f32,
/// }
///
/// impl fmt::Display for Position {
/// fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
/// write!(f, "({}, {})", self.longitude, self.latitude)
/// }
/// }
///
/// assert_eq!("(1.987, 2.983)",
/// format!("{}", Position { longitude: 1.987, latitude: 2.983, }));
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn fmt(&self, f: &mut Formatter<'_>) -> Result;
}
/// `o` formatting.
///
/// The `Octal` trait should format its output as a number in base-8.
///
/// For primitive signed integers (`i8` to `i128`, and `isize`),
/// negative values are formatted as the two’s complement representation.
///
/// The alternate flag, `#`, adds a `0o` in front of the output.
///
/// For more information on formatters, see [the module-level documentation][module].
///
/// [module]: ../../std/fmt/index.html
///
/// # Examples
///
/// Basic usage with `i32`:
///
/// ```
/// let x = 42; // 42 is '52' in octal
///
/// assert_eq!(format!("{:o}", x), "52");
/// assert_eq!(format!("{:#o}", x), "0o52");
///
/// assert_eq!(format!("{:o}", -16), "37777777760");
/// ```
///
/// Implementing `Octal` on a type:
///
/// ```
/// use std::fmt;
///
/// struct Length(i32);
///
/// impl fmt::Octal for Length {
/// fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
/// let val = self.0;
///
/// fmt::Octal::fmt(&val, f) // delegate to i32's implementation
/// }
/// }
///
/// let l = Length(9);
///
/// assert_eq!(format!("l as octal is: {:o}", l), "l as octal is: 11");
///
/// assert_eq!(format!("l as octal is: {:#06o}", l), "l as octal is: 0o0011");
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub trait Octal {
/// Formats the value using the given formatter.
#[stable(feature = "rust1", since = "1.0.0")]
fn fmt(&self, f: &mut Formatter<'_>) -> Result;
}
/// `b` formatting.
///
/// The `Binary` trait should format its output as a number in binary.
///
/// For primitive signed integers ([`i8`] to [`i128`], and [`isize`]),
/// negative values are formatted as the two’s complement representation.
///
/// The alternate flag, `#`, adds a `0b` in front of the output.
///
/// For more information on formatters, see [the module-level documentation][module].
///
/// # Examples
///
/// Basic usage with [`i32`]:
///
/// ```
/// let x = 42; // 42 is '101010' in binary
///
/// assert_eq!(format!("{:b}", x), "101010");
/// assert_eq!(format!("{:#b}", x), "0b101010");
///
/// assert_eq!(format!("{:b}", -16), "11111111111111111111111111110000");
/// ```
///
/// Implementing `Binary` on a type:
///
/// ```
/// use std::fmt;
///
/// struct Length(i32);
///
/// impl fmt::Binary for Length {
/// fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
/// let val = self.0;
///
/// fmt::Binary::fmt(&val, f) // delegate to i32's implementation
/// }
/// }
///
/// let l = Length(107);
///
/// assert_eq!(format!("l as binary is: {:b}", l), "l as binary is: 1101011");
///
/// assert_eq!(
/// format!("l as binary is: {:#032b}", l),
/// "l as binary is: 0b000000000000000000000001101011"
/// );
/// ```
///
/// [module]: ../../std/fmt/index.html
/// [`i8`]: ../../std/primitive.i8.html
/// [`i128`]: ../../std/primitive.i128.html
/// [`isize`]: ../../std/primitive.isize.html
/// [`i32`]: ../../std/primitive.i32.html
#[stable(feature = "rust1", since = "1.0.0")]
pub trait Binary {
/// Formats the value using the given formatter.
#[stable(feature = "rust1", since = "1.0.0")]
fn fmt(&self, f: &mut Formatter<'_>) -> Result;
}
/// `x` formatting.
///
/// The `LowerHex` trait should format its output as a number in hexadecimal, with `a` through `f`
/// in lower case.
///
/// For primitive signed integers (`i8` to `i128`, and `isize`),
/// negative values are formatted as the two’s complement representation.
///
/// The alternate flag, `#`, adds a `0x` in front of the output.
///
/// For more information on formatters, see [the module-level documentation][module].
///
/// [module]: ../../std/fmt/index.html
///
/// # Examples
///
/// Basic usage with `i32`:
///
/// ```
/// let x = 42; // 42 is '2a' in hex
///
/// assert_eq!(format!("{:x}", x), "2a");
/// assert_eq!(format!("{:#x}", x), "0x2a");
///
/// assert_eq!(format!("{:x}", -16), "fffffff0");
/// ```
///
/// Implementing `LowerHex` on a type:
///
/// ```
/// use std::fmt;
///
/// struct Length(i32);
///
/// impl fmt::LowerHex for Length {
/// fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
/// let val = self.0;
///
/// fmt::LowerHex::fmt(&val, f) // delegate to i32's implementation
/// }
/// }
///
/// let l = Length(9);
///
/// assert_eq!(format!("l as hex is: {:x}", l), "l as hex is: 9");
///
/// assert_eq!(format!("l as hex is: {:#010x}", l), "l as hex is: 0x00000009");
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub trait LowerHex {
/// Formats the value using the given formatter.
#[stable(feature = "rust1", since = "1.0.0")]
fn fmt(&self, f: &mut Formatter<'_>) -> Result;
}
/// `X` formatting.
///
/// The `UpperHex` trait should format its output as a number in hexadecimal, with `A` through `F`
/// in upper case.
///
/// For primitive signed integers (`i8` to `i128`, and `isize`),
/// negative values are formatted as the two’s complement representation.
///
/// The alternate flag, `#`, adds a `0x` in front of the output.
///
/// For more information on formatters, see [the module-level documentation][module].
///
/// [module]: ../../std/fmt/index.html
///
/// # Examples
///
/// Basic usage with `i32`:
///
/// ```
/// let x = 42; // 42 is '2A' in hex
///
/// assert_eq!(format!("{:X}", x), "2A");
/// assert_eq!(format!("{:#X}", x), "0x2A");
///
/// assert_eq!(format!("{:X}", -16), "FFFFFFF0");
/// ```
///
/// Implementing `UpperHex` on a type:
///
/// ```
/// use std::fmt;
///
/// struct Length(i32);
///
/// impl fmt::UpperHex for Length {
/// fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
/// let val = self.0;
///
/// fmt::UpperHex::fmt(&val, f) // delegate to i32's implementation
/// }
/// }
///
/// let l = Length(i32::max_value());
///
/// assert_eq!(format!("l as hex is: {:X}", l), "l as hex is: 7FFFFFFF");
///
/// assert_eq!(format!("l as hex is: {:#010X}", l), "l as hex is: 0x7FFFFFFF");
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub trait UpperHex {
/// Formats the value using the given formatter.
#[stable(feature = "rust1", since = "1.0.0")]
fn fmt(&self, f: &mut Formatter<'_>) -> Result;
}
/// `p` formatting.
///
/// The `Pointer` trait should format its output as a memory location. This is commonly presented
/// as hexadecimal.
///
/// For more information on formatters, see [the module-level documentation][module].
///
/// [module]: ../../std/fmt/index.html
///
/// # Examples
///
/// Basic usage with `&i32`:
///
/// ```
/// let x = &42;
///
/// let address = format!("{:p}", x); // this produces something like '0x7f06092ac6d0'
/// ```
///
/// Implementing `Pointer` on a type:
///
/// ```
/// use std::fmt;
///
/// struct Length(i32);
///
/// impl fmt::Pointer for Length {
/// fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
/// // use `as` to convert to a `*const T`, which implements Pointer, which we can use
///
/// let ptr = self as *const Self;
/// fmt::Pointer::fmt(&ptr, f)
/// }
/// }
///
/// let l = Length(42);
///
/// println!("l is in memory here: {:p}", l);
///
/// let l_ptr = format!("{:018p}", l);
/// assert_eq!(l_ptr.len(), 18);
/// assert_eq!(&l_ptr[..2], "0x");
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub trait Pointer {
/// Formats the value using the given formatter.
#[stable(feature = "rust1", since = "1.0.0")]
fn fmt(&self, f: &mut Formatter<'_>) -> Result;
}
/// `e` formatting.
///
/// The `LowerExp` trait should format its output in scientific notation with a lower-case `e`.
///
/// For more information on formatters, see [the module-level documentation][module].
///
/// [module]: ../../std/fmt/index.html
///
/// # Examples
///
/// Basic usage with `f64`:
///
/// ```
/// let x = 42.0; // 42.0 is '4.2e1' in scientific notation
///
/// assert_eq!(format!("{:e}", x), "4.2e1");
/// ```
///
/// Implementing `LowerExp` on a type:
///
/// ```
/// use std::fmt;
///
/// struct Length(i32);
///
/// impl fmt::LowerExp for Length {
/// fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
/// let val = f64::from(self.0);
/// fmt::LowerExp::fmt(&val, f) // delegate to f64's implementation
/// }
/// }
///
/// let l = Length(100);
///
/// assert_eq!(
/// format!("l in scientific notation is: {:e}", l),
/// "l in scientific notation is: 1e2"
/// );
///
/// assert_eq!(
/// format!("l in scientific notation is: {:05e}", l),
/// "l in scientific notation is: 001e2"
/// );
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub trait LowerExp {
/// Formats the value using the given formatter.
#[stable(feature = "rust1", since = "1.0.0")]
fn fmt(&self, f: &mut Formatter<'_>) -> Result;
}
/// `E` formatting.
///
/// The `UpperExp` trait should format its output in scientific notation with an upper-case `E`.
///
/// For more information on formatters, see [the module-level documentation][module].
///
/// [module]: ../../std/fmt/index.html
///
/// # Examples
///
/// Basic usage with `f64`:
///
/// ```
/// let x = 42.0; // 42.0 is '4.2E1' in scientific notation
///
/// assert_eq!(format!("{:E}", x), "4.2E1");
/// ```
///
/// Implementing `UpperExp` on a type:
///
/// ```
/// use std::fmt;
///
/// struct Length(i32);
///
/// impl fmt::UpperExp for Length {
/// fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
/// let val = f64::from(self.0);
/// fmt::UpperExp::fmt(&val, f) // delegate to f64's implementation
/// }
/// }
///
/// let l = Length(100);
///
/// assert_eq!(
/// format!("l in scientific notation is: {:E}", l),
/// "l in scientific notation is: 1E2"
/// );
///
/// assert_eq!(
/// format!("l in scientific notation is: {:05E}", l),
/// "l in scientific notation is: 001E2"
/// );
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub trait UpperExp {
/// Formats the value using the given formatter.
#[stable(feature = "rust1", since = "1.0.0")]
fn fmt(&self, f: &mut Formatter<'_>) -> Result;
}
/// The `write` function takes an output stream, and an `Arguments` struct
/// that can be precompiled with the `format_args!` macro.
///
/// The arguments will be formatted according to the specified format string
/// into the output stream provided.
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// use std::fmt;
///
/// let mut output = String::new();
/// fmt::write(&mut output, format_args!("Hello {}!", "world"))
/// .expect("Error occurred while trying to write in String");
/// assert_eq!(output, "Hello world!");
/// ```
///
/// Please note that using [`write!`] might be preferable. Example:
///
/// ```
/// use std::fmt::Write;
///
/// let mut output = String::new();
/// write!(&mut output, "Hello {}!", "world")
/// .expect("Error occurred while trying to write in String");
/// assert_eq!(output, "Hello world!");
/// ```
///
/// [`write!`]: ../../std/macro.write.html
#[stable(feature = "rust1", since = "1.0.0")]
pub fn write(output: &mut dyn Write, args: Arguments<'_>) -> Result {
let mut formatter = Formatter {
flags: 0,
width: None,
precision: None,
buf: output,
align: rt::v1::Alignment::Unknown,
fill: ' ',
args: args.args,
curarg: args.args.iter(),
};
let mut idx = 0;
match args.fmt {
None => {
// We can use default formatting parameters for all arguments.
for (arg, piece) in args.args.iter().zip(args.pieces.iter()) {
formatter.buf.write_str(*piece)?;
(arg.formatter)(arg.value, &mut formatter)?;
idx += 1;
}
}
Some(fmt) => {
// Every spec has a corresponding argument that is preceded by
// a string piece.
for (arg, piece) in fmt.iter().zip(args.pieces.iter()) {
formatter.buf.write_str(*piece)?;
formatter.run(arg)?;
idx += 1;
}
}
}
// There can be only one trailing string piece left.
if let Some(piece) = args.pieces.get(idx) {
formatter.buf.write_str(*piece)?;
}
Ok(())
}
/// Padding after the end of something. Returned by `Formatter::padding`.
#[must_use = "don't forget to write the post padding"]
struct PostPadding {
fill: char,
padding: usize,
}
impl PostPadding {
fn new(fill: char, padding: usize) -> PostPadding {
PostPadding { fill, padding }
}
/// Write this post padding.
fn write(self, buf: &mut dyn Write) -> Result {
for _ in 0..self.padding {
buf.write_char(self.fill)?;
}
Ok(())
}
}
impl<'a> Formatter<'a> {
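    // Temporarily swaps in a wrapped output buffer (used e.g. by the debug
    // builders for indentation) while preserving the current formatting flags.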
fn wrap_buf<'b, 'c, F>(&'b mut self, wrap: F) -> Formatter<'c>
where
'b: 'c,
F: FnOnce(&'b mut (dyn Write + 'b)) -> &'c mut (dyn Write + 'c),
{
Formatter {
// We want to change this
buf: wrap(self.buf),
// And preserve these
flags: self.flags,
fill: self.fill,
align: self.align,
width: self.width,
precision: self.precision,
// These only exist in the struct for the `run` method,
// which won’t be used together with this method.
curarg: self.curarg.clone(),
args: self.args,
}
}
// First up is the collection of functions used to execute a format string
// at runtime. This consumes all of the compile-time statics generated by
// the format! syntax extension.
fn run(&mut self, arg: &rt::v1::Argument) -> Result {
// Fill in the format parameters into the formatter
self.fill = arg.format.fill;
self.align = arg.format.align;
self.flags = arg.format.flags;
self.width = self.getcount(&arg.format.width);
self.precision = self.getcount(&arg.format.precision);
// Extract the correct argument
let value = match arg.position {
rt::v1::Position::Next => *self.curarg.next().unwrap(),
rt::v1::Position::At(i) => self.args[i],
};
// Then actually do some printing
(value.formatter)(value.value, self)
}
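    // Resolves a formatting `Count` (width or precision) to a concrete value,
    // reading from the argument list when it refers to a runtime parameter.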
fn getcount(&mut self, cnt: &rt::v1::Count) -> Option<usize> {
match *cnt {
rt::v1::Count::Is(n) => Some(n),
rt::v1::Count::Implied => None,
rt::v1::Count::Param(i) => self.args[i].as_usize(),
rt::v1::Count::NextParam => self.curarg.next()?.as_usize(),
}
}
// Helper methods used for padding and processing formatting arguments that
// all formatting traits can use.
/// Performs the correct padding for an integer which has already been
/// emitted into a str. The str should *not* contain the sign for the
/// integer, that will be added by this method.
///
/// # Arguments
///
/// * is_nonnegative - whether the original integer was either positive or zero.
/// * prefix - if the '#' character (Alternate) is provided, this
/// is the prefix to put in front of the number.
/// * buf - the byte array that the number has been formatted into
///
/// This function will correctly account for the flags provided as well as
/// the minimum width. It will not take precision into account.
///
/// # Examples
///
/// ```
/// use std::fmt;
///
    /// struct Foo { nb: i32 }
///
/// impl Foo {
/// fn new(nb: i32) -> Foo {
/// Foo {
/// nb,
/// }
/// }
/// }
///
/// impl fmt::Display for Foo {
/// fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
/// // We need to remove "-" from the number output.
/// let tmp = self.nb.abs().to_string();
///
    ///         formatter.pad_integral(self.nb >= 0, "Foo ", &tmp)
/// }
/// }
///
/// assert_eq!(&format!("{}", Foo::new(2)), "2");
/// assert_eq!(&format!("{}", Foo::new(-1)), "-1");
/// assert_eq!(&format!("{:#}", Foo::new(-1)), "-Foo 1");
/// assert_eq!(&format!("{:0>#8}", Foo::new(-1)), "00-Foo 1");
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn pad_integral(&mut self, is_nonnegative: bool, prefix: &str, buf: &str) -> Result {
let mut width = buf.len();
let mut sign = None;
if !is_nonnegative {
sign = Some('-');
width += 1;
} else if self.sign_plus() {
sign = Some('+');
width += 1;
}
let prefix = if self.alternate() {
width += prefix.chars().count();
Some(prefix)
} else {
None
};
// Writes the sign if it exists, and then the prefix if it was requested
#[inline(never)]
fn write_prefix(f: &mut Formatter<'_>, sign: Option<char>, prefix: Option<&str>) -> Result {
if let Some(c) = sign {
f.buf.write_char(c)?;
}
if let Some(prefix) = prefix { f.buf.write_str(prefix) } else { Ok(()) }
}
// The `width` field is more of a `min-width` parameter at this point.
match self.width {
// If there's no minimum length requirements then we can just
// write the bytes.
None => {
write_prefix(self, sign, prefix)?;
self.buf.write_str(buf)
}
// Check if we're over the minimum width, if so then we can also
// just write the bytes.
Some(min) if width >= min => {
write_prefix(self, sign, prefix)?;
self.buf.write_str(buf)
}
// The sign and prefix goes before the padding if the fill character
// is zero
Some(min) if self.sign_aware_zero_pad() => {
self.fill = '0';
self.align = rt::v1::Alignment::Right;
write_prefix(self, sign, prefix)?;
let post_padding = self.padding(min - width, rt::v1::Alignment::Right)?;
self.buf.write_str(buf)?;
post_padding.write(self.buf)
}
// Otherwise, the sign and prefix goes after the padding
Some(min) => {
let post_padding = self.padding(min - width, rt::v1::Alignment::Right)?;
write_prefix(self, sign, prefix)?;
self.buf.write_str(buf)?;
post_padding.write(self.buf)
}
}
}
/// This function takes a string slice and emits it to the internal buffer
/// after applying the relevant formatting flags specified. The flags
/// recognized for generic strings are:
///
/// * width - the minimum width of what to emit
/// * fill/align - what to emit and where to emit it if the string
/// provided needs to be padded
/// * precision - the maximum length to emit, the string is truncated if it
/// is longer than this length
///
/// Notably this function ignores the `flag` parameters.
///
/// # Examples
///
/// ```
/// use std::fmt;
///
/// struct Foo;
///
/// impl fmt::Display for Foo {
/// fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
/// formatter.pad("Foo")
/// }
/// }
///
/// assert_eq!(&format!("{:<4}", Foo), "Foo ");
/// assert_eq!(&format!("{:0>4}", Foo), "0Foo");
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn pad(&mut self, s: &str) -> Result {
// Make sure there's a fast path up front
if self.width.is_none() && self.precision.is_none() {
return self.buf.write_str(s);
}
// The `precision` field can be interpreted as a `max-width` for the
// string being formatted.
let s = if let Some(max) = self.precision {
// If our string is longer that the precision, then we must have
// truncation. However other flags like `fill`, `width` and `align`
// must act as always.
if let Some((i, _)) = s.char_indices().nth(max) {
// LLVM here can't prove that `..i` won't panic `&s[..i]`, but
// we know that it can't panic. Use `get` + `unwrap_or` to avoid
// `unsafe` and otherwise don't emit any panic-related code
// here.
s.get(..i).unwrap_or(&s)
} else {
&s
}
} else {
&s
};
// The `width` field is more of a `min-width` parameter at this point.
match self.width {
// If we're under the maximum length, and there's no minimum length
// requirements, then we can just emit the string
None => self.buf.write_str(s),
// If we're under the maximum width, check if we're over the minimum
// width, if so it's as easy as just emitting the string.
Some(width) if s.chars().count() >= width => self.buf.write_str(s),
// If we're under both the maximum and the minimum width, then fill
// up the minimum width with the specified string + some alignment.
Some(width) => {
let align = rt::v1::Alignment::Left;
let post_padding = self.padding(width - s.chars().count(), align)?;
self.buf.write_str(s)?;
post_padding.write(self.buf)
}
}
}
/// Write the pre-padding and return the unwritten post-padding. Callers are
/// responsible for ensuring post-padding is written after the thing that is
/// being padded.
fn padding(
&mut self,
padding: usize,
default: rt::v1::Alignment,
) -> result::Result<PostPadding, Error> {
let align = match self.align {
rt::v1::Alignment::Unknown => default,
_ => self.align,
};
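        // Split the fill between both sides; with center alignment the extra
        // fill character for odd amounts goes after the value.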
let (pre_pad, post_pad) = match align {
rt::v1::Alignment::Left => (0, padding),
rt::v1::Alignment::Right | rt::v1::Alignment::Unknown => (padding, 0),
rt::v1::Alignment::Center => (padding / 2, (padding + 1) / 2),
};
for _ in 0..pre_pad {
self.buf.write_char(self.fill)?;
}
Ok(PostPadding::new(self.fill, post_pad))
}
/// Takes the formatted parts and applies the padding.
/// Assumes that the caller already has rendered the parts with required precision,
/// so that `self.precision` can be ignored.
fn pad_formatted_parts(&mut self, formatted: &flt2dec::Formatted<'_>) -> Result {
if let Some(mut width) = self.width {
// for the sign-aware zero padding, we render the sign first and
// behave as if we had no sign from the beginning.
let mut formatted = formatted.clone();
let old_fill = self.fill;
let old_align = self.align;
let mut align = old_align;
if self.sign_aware_zero_pad() {
// a sign always goes first
let sign = unsafe { str::from_utf8_unchecked(formatted.sign) };
self.buf.write_str(sign)?;
// remove the sign from the formatted parts
formatted.sign = b"";
width = width.saturating_sub(sign.len());
align = rt::v1::Alignment::Right;
self.fill = '0';
self.align = rt::v1::Alignment::Right;
}
// remaining parts go through the ordinary padding process.
let len = formatted.len();
let ret = if width <= len {
// no padding
self.write_formatted_parts(&formatted)
} else {
let post_padding = self.padding(width - len, align)?;
self.write_formatted_parts(&formatted)?;
post_padding.write(self.buf)
};
self.fill = old_fill;
self.align = old_align;
ret
} else {
// this is the common case and we take a shortcut
self.write_formatted_parts(formatted)
}
}
fn write_formatted_parts(&mut self, formatted: &flt2dec::Formatted<'_>) -> Result {
fn write_bytes(buf: &mut dyn Write, s: &[u8]) -> Result {
buf.write_str(unsafe { str::from_utf8_unchecked(s) })
}
if !formatted.sign.is_empty() {
write_bytes(self.buf, formatted.sign)?;
}
for part in formatted.parts {
match *part {
flt2dec::Part::Zero(mut nzeroes) => {
const ZEROES: &str = // 64 zeroes
"0000000000000000000000000000000000000000000000000000000000000000";
while nzeroes > ZEROES.len() {
self.buf.write_str(ZEROES)?;
nzeroes -= ZEROES.len();
}
if nzeroes > 0 {
self.buf.write_str(&ZEROES[..nzeroes])?;
}
}
flt2dec::Part::Num(mut v) => {
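                    // `Part::Num` carries a `u16`, so at most 5 decimal digits are needed.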
let mut s = [0; 5];
let len = part.len();
for c in s[..len].iter_mut().rev() {
*c = b'0' + (v % 10) as u8;
v /= 10;
}
write_bytes(self.buf, &s[..len])?;
}
flt2dec::Part::Copy(buf) => {
write_bytes(self.buf, buf)?;
}
}
}
Ok(())
}
/// Writes some data to the underlying buffer contained within this
/// formatter.
///
/// # Examples
///
/// ```
/// use std::fmt;
///
/// struct Foo;
///
/// impl fmt::Display for Foo {
/// fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
/// formatter.write_str("Foo")
/// // This is equivalent to:
/// // write!(formatter, "Foo")
/// }
/// }
///
/// assert_eq!(&format!("{}", Foo), "Foo");
/// assert_eq!(&format!("{:0>8}", Foo), "Foo");
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn write_str(&mut self, data: &str) -> Result {
self.buf.write_str(data)
}
/// Writes some formatted information into this instance.
///
/// # Examples
///
/// ```
/// use std::fmt;
///
/// struct Foo(i32);
///
/// impl fmt::Display for Foo {
/// fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
/// formatter.write_fmt(format_args!("Foo {}", self.0))
/// }
/// }
///
/// assert_eq!(&format!("{}", Foo(-1)), "Foo -1");
/// assert_eq!(&format!("{:0>8}", Foo(2)), "Foo 2");
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn write_fmt(&mut self, fmt: Arguments<'_>) -> Result {
write(self.buf, fmt)
}
/// Flags for formatting
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_deprecated(
since = "1.24.0",
reason = "use the `sign_plus`, `sign_minus`, `alternate`, \
or `sign_aware_zero_pad` methods instead"
)]
pub fn flags(&self) -> u32 {
self.flags
}
/// Character used as 'fill' whenever there is alignment.
///
/// # Examples
///
/// ```
/// use std::fmt;
///
/// struct Foo;
///
/// impl fmt::Display for Foo {
/// fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
/// let c = formatter.fill();
/// if let Some(width) = formatter.width() {
/// for _ in 0..width {
/// write!(formatter, "{}", c)?;
/// }
/// Ok(())
/// } else {
/// write!(formatter, "{}", c)
/// }
/// }
/// }
///
/// // We set alignment to the left with ">".
/// assert_eq!(&format!("{:G>3}", Foo), "GGG");
/// assert_eq!(&format!("{:t>6}", Foo), "tttttt");
/// ```
#[stable(feature = "fmt_flags", since = "1.5.0")]
pub fn fill(&self) -> char {
self.fill
}
/// Flag indicating what form of alignment was requested.
///
/// # Examples
///
/// ```
/// extern crate core;
///
/// use std::fmt::{self, Alignment};
///
/// struct Foo;
///
/// impl fmt::Display for Foo {
/// fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
/// let s = if let Some(s) = formatter.align() {
/// match s {
/// Alignment::Left => "left",
/// Alignment::Right => "right",
/// Alignment::Center => "center",
/// }
/// } else {
/// "into the void"
/// };
/// write!(formatter, "{}", s)
/// }
/// }
///
/// assert_eq!(&format!("{:<}", Foo), "left");
/// assert_eq!(&format!("{:>}", Foo), "right");
/// assert_eq!(&format!("{:^}", Foo), "center");
/// assert_eq!(&format!("{}", Foo), "into the void");
/// ```
#[stable(feature = "fmt_flags_align", since = "1.28.0")]
pub fn align(&self) -> Option<Alignment> {
match self.align {
rt::v1::Alignment::Left => Some(Alignment::Left),
rt::v1::Alignment::Right => Some(Alignment::Right),
rt::v1::Alignment::Center => Some(Alignment::Center),
rt::v1::Alignment::Unknown => None,
}
}
/// Optionally specified integer width that the output should be.
///
/// # Examples
///
/// ```
/// use std::fmt;
///
/// struct Foo(i32);
///
/// impl fmt::Display for Foo {
/// fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
/// if let Some(width) = formatter.width() {
/// // If we received a width, we use it
/// write!(formatter, "{:width$}", &format!("Foo({})", self.0), width = width)
/// } else {
/// // Otherwise we do nothing special
/// write!(formatter, "Foo({})", self.0)
/// }
/// }
/// }
///
/// assert_eq!(&format!("{:10}", Foo(23)), "Foo(23) ");
/// assert_eq!(&format!("{}", Foo(23)), "Foo(23)");
/// ```
#[stable(feature = "fmt_flags", since = "1.5.0")]
pub fn width(&self) -> Option<usize> {
self.width
}
/// Optionally specified precision for numeric types.
///
/// # Examples
///
/// ```
/// use std::fmt;
///
/// struct Foo(f32);
///
/// impl fmt::Display for Foo {
/// fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
/// if let Some(precision) = formatter.precision() {
/// // If we received a precision, we use it.
/// write!(formatter, "Foo({1:.*})", precision, self.0)
/// } else {
/// // Otherwise we default to 2.
/// write!(formatter, "Foo({:.2})", self.0)
/// }
/// }
/// }
///
/// assert_eq!(&format!("{:.4}", Foo(23.2)), "Foo(23.2000)");
/// assert_eq!(&format!("{}", Foo(23.2)), "Foo(23.20)");
/// ```
#[stable(feature = "fmt_flags", since = "1.5.0")]
pub fn precision(&self) -> Option<usize> {
self.precision
}
/// Determines if the `+` flag was specified.
///
/// # Examples
///
/// ```
/// use std::fmt;
///
/// struct Foo(i32);
///
/// impl fmt::Display for Foo {
/// fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
/// if formatter.sign_plus() {
/// write!(formatter,
/// "Foo({}{})",
/// if self.0 < 0 { '-' } else { '+' },
/// self.0)
/// } else {
/// write!(formatter, "Foo({})", self.0)
/// }
/// }
/// }
///
/// assert_eq!(&format!("{:+}", Foo(23)), "Foo(+23)");
/// assert_eq!(&format!("{}", Foo(23)), "Foo(23)");
/// ```
#[stable(feature = "fmt_flags", since = "1.5.0")]
pub fn sign_plus(&self) -> bool {
self.flags & (1 << FlagV1::SignPlus as u32) != 0
}
/// Determines if the `-` flag was specified.
///
/// # Examples
///
/// ```
/// use std::fmt;
///
/// struct Foo(i32);
///
/// impl fmt::Display for Foo {
/// fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
/// if formatter.sign_minus() {
/// // You want a minus sign? Have one!
/// write!(formatter, "-Foo({})", self.0)
/// } else {
/// write!(formatter, "Foo({})", self.0)
/// }
/// }
/// }
///
/// assert_eq!(&format!("{:-}", Foo(23)), "-Foo(23)");
/// assert_eq!(&format!("{}", Foo(23)), "Foo(23)");
/// ```
#[stable(feature = "fmt_flags", since = "1.5.0")]
pub fn sign_minus(&self) -> bool {
self.flags & (1 << FlagV1::SignMinus as u32) != 0
}
/// Determines if the `#` flag was specified.
///
/// # Examples
///
/// ```
/// use std::fmt;
///
/// struct Foo(i32);
///
/// impl fmt::Display for Foo {
/// fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
/// if formatter.alternate() {
/// write!(formatter, "Foo({})", self.0)
/// } else {
/// write!(formatter, "{}", self.0)
/// }
/// }
/// }
///
/// assert_eq!(&format!("{:#}", Foo(23)), "Foo(23)");
/// assert_eq!(&format!("{}", Foo(23)), "23");
/// ```
#[stable(feature = "fmt_flags", since = "1.5.0")]
pub fn alternate(&self) -> bool {
self.flags & (1 << FlagV1::Alternate as u32) != 0
}
/// Determines if the `0` flag was specified.
///
/// # Examples
///
/// ```
/// use std::fmt;
///
/// struct Foo(i32);
///
/// impl fmt::Display for Foo {
/// fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
/// assert!(formatter.sign_aware_zero_pad());
/// assert_eq!(formatter.width(), Some(4));
/// // We ignore the formatter's options.
/// write!(formatter, "{}", self.0)
/// }
/// }
///
/// assert_eq!(&format!("{:04}", Foo(23)), "23");
/// ```
#[stable(feature = "fmt_flags", since = "1.5.0")]
pub fn sign_aware_zero_pad(&self) -> bool {
self.flags & (1 << FlagV1::SignAwareZeroPad as u32) != 0
}
// FIXME: Decide what public API we want for these two flags.
// https://github.com/rust-lang/rust/issues/48584
fn debug_lower_hex(&self) -> bool {
self.flags & (1 << FlagV1::DebugLowerHex as u32) != 0
}
fn debug_upper_hex(&self) -> bool {
self.flags & (1 << FlagV1::DebugUpperHex as u32) != 0
}
/// Creates a [`DebugStruct`] builder designed to assist with creation of
/// [`fmt::Debug`] implementations for structs.
///
/// [`DebugStruct`]: ../../std/fmt/struct.DebugStruct.html
/// [`fmt::Debug`]: ../../std/fmt/trait.Debug.html
///
/// # Examples
///
/// ```rust
/// use std::fmt;
/// use std::net::Ipv4Addr;
///
/// struct Foo {
/// bar: i32,
/// baz: String,
/// addr: Ipv4Addr,
/// }
///
/// impl fmt::Debug for Foo {
/// fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
/// fmt.debug_struct("Foo")
/// .field("bar", &self.bar)
/// .field("baz", &self.baz)
/// .field("addr", &format_args!("{}", self.addr))
/// .finish()
/// }
/// }
///
/// assert_eq!(
/// "Foo { bar: 10, baz: \"Hello World\", addr: 127.0.0.1 }",
/// format!("{:?}", Foo {
/// bar: 10,
/// baz: "Hello World".to_string(),
/// addr: Ipv4Addr::new(127, 0, 0, 1),
/// })
/// );
/// ```
#[stable(feature = "debug_builders", since = "1.2.0")]
pub fn debug_struct<'b>(&'b mut self, name: &str) -> DebugStruct<'b, 'a> {
builders::debug_struct_new(self, name)
}
/// Creates a `DebugTuple` builder designed to assist with creation of
/// `fmt::Debug` implementations for tuple structs.
///
/// # Examples
///
/// ```rust
/// use std::fmt;
/// use std::marker::PhantomData;
///
/// struct Foo<T>(i32, String, PhantomData<T>);
///
/// impl<T> fmt::Debug for Foo<T> {
/// fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
/// fmt.debug_tuple("Foo")
/// .field(&self.0)
/// .field(&self.1)
/// .field(&format_args!("_"))
/// .finish()
/// }
/// }
///
/// assert_eq!(
/// "Foo(10, \"Hello\", _)",
/// format!("{:?}", Foo(10, "Hello".to_string(), PhantomData::<u8>))
/// );
/// ```
#[stable(feature = "debug_builders", since = "1.2.0")]
pub fn debug_tuple<'b>(&'b mut self, name: &str) -> DebugTuple<'b, 'a> {
builders::debug_tuple_new(self, name)
}
/// Creates a `DebugList` builder designed to assist with creation of
/// `fmt::Debug` implementations for list-like structures.
///
/// # Examples
///
/// ```rust
/// use std::fmt;
///
/// struct Foo(Vec<i32>);
///
/// impl fmt::Debug for Foo {
/// fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
/// fmt.debug_list().entries(self.0.iter()).finish()
/// }
/// }
///
/// assert_eq!(format!("{:?}", Foo(vec![10, 11])), "[10, 11]");
/// ```
#[stable(feature = "debug_builders", since = "1.2.0")]
pub fn debug_list<'b>(&'b mut self) -> DebugList<'b, 'a> {
builders::debug_list_new(self)
}
/// Creates a `DebugSet` builder designed to assist with creation of
/// `fmt::Debug` implementations for set-like structures.
///
/// # Examples
///
/// ```rust
/// use std::fmt;
///
/// struct Foo(Vec<i32>);
///
/// impl fmt::Debug for Foo {
/// fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
/// fmt.debug_set().entries(self.0.iter()).finish()
/// }
/// }
///
/// assert_eq!(format!("{:?}", Foo(vec![10, 11])), "{10, 11}");
/// ```
///
/// [`format_args!`]: ../../std/macro.format_args.html
///
/// In this more complex example, we use [`format_args!`] and `.debug_set()`
/// to build a list of match arms:
///
/// ```rust
/// use std::fmt;
///
/// struct Arm<'a, L: 'a, R: 'a>(&'a (L, R));
/// struct Table<'a, K: 'a, V: 'a>(&'a [(K, V)], V);
///
/// impl<'a, L, R> fmt::Debug for Arm<'a, L, R>
/// where
/// L: 'a + fmt::Debug, R: 'a + fmt::Debug
/// {
/// fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
/// L::fmt(&(self.0).0, fmt)?;
/// fmt.write_str(" => ")?;
/// R::fmt(&(self.0).1, fmt)
/// }
/// }
///
/// impl<'a, K, V> fmt::Debug for Table<'a, K, V>
/// where
/// K: 'a + fmt::Debug, V: 'a + fmt::Debug
/// {
/// fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
/// fmt.debug_set()
/// .entries(self.0.iter().map(Arm))
/// .entry(&Arm(&(format_args!("_"), &self.1)))
/// .finish()
/// }
/// }
/// ```
#[stable(feature = "debug_builders", since = "1.2.0")]
pub fn debug_set<'b>(&'b mut self) -> DebugSet<'b, 'a> {
builders::debug_set_new(self)
}
/// Creates a `DebugMap` builder designed to assist with creation of
/// `fmt::Debug` implementations for map-like structures.
///
/// # Examples
///
/// ```rust
/// use std::fmt;
///
/// struct Foo(Vec<(String, i32)>);
///
/// impl fmt::Debug for Foo {
/// fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
/// fmt.debug_map().entries(self.0.iter().map(|&(ref k, ref v)| (k, v))).finish()
/// }
/// }
///
/// assert_eq!(
/// format!("{:?}", Foo(vec![("A".to_string(), 10), ("B".to_string(), 11)])),
/// r#"{"A": 10, "B": 11}"#
/// );
/// ```
#[stable(feature = "debug_builders", since = "1.2.0")]
pub fn debug_map<'b>(&'b mut self) -> DebugMap<'b, 'a> {
builders::debug_map_new(self)
}
}
#[stable(since = "1.2.0", feature = "formatter_write")]
impl Write for Formatter<'_> {
fn write_str(&mut self, s: &str) -> Result {
self.buf.write_str(s)
}
fn write_char(&mut self, c: char) -> Result {
self.buf.write_char(c)
}
fn write_fmt(&mut self, args: Arguments<'_>) -> Result {
write(self.buf, args)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl Display for Error {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
Display::fmt("an error occurred when formatting an argument", f)
}
}
// Implementations of the core formatting traits
macro_rules! fmt_refs {
($($tr:ident),*) => {
$(
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + $tr> $tr for &T {
fn fmt(&self, f: &mut Formatter<'_>) -> Result { $tr::fmt(&**self, f) }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + $tr> $tr for &mut T {
fn fmt(&self, f: &mut Formatter<'_>) -> Result { $tr::fmt(&**self, f) }
}
)*
}
}
fmt_refs! { Debug, Display, Octal, Binary, LowerHex, UpperHex, LowerExp, UpperExp }
#[unstable(feature = "never_type", issue = "35121")]
impl Debug for ! {
fn fmt(&self, _: &mut Formatter<'_>) -> Result {
*self
}
}
#[unstable(feature = "never_type", issue = "35121")]
impl Display for ! {
fn fmt(&self, _: &mut Formatter<'_>) -> Result {
*self
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl Debug for bool {
#[inline]
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
Display::fmt(self, f)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl Display for bool {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
Display::fmt(if *self { "true" } else { "false" }, f)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl Debug for str {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
f.write_char('"')?;
let mut from = 0;
for (i, c) in self.char_indices() {
let esc = c.escape_debug();
// If char needs escaping, flush backlog so far and write, else skip
if esc.len() != 1 {
f.write_str(&self[from..i])?;
for c in esc {
f.write_char(c)?;
}
from = i + c.len_utf8();
}
}
f.write_str(&self[from..])?;
f.write_char('"')
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl Display for str {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
f.pad(self)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl Debug for char {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
f.write_char('\'')?;
for c in self.escape_debug() {
f.write_char(c)?
}
f.write_char('\'')
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl Display for char {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
if f.width.is_none() && f.precision.is_none() {
f.write_char(*self)
} else {
f.pad(self.encode_utf8(&mut [0; 4]))
}
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Pointer for *const T {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
let old_width = f.width;
let old_flags = f.flags;
// The alternate flag is already treated by LowerHex as being special-
// it denotes whether to prefix with 0x. We use it to work out whether
// or not to zero extend, and then unconditionally set it to get the
// prefix.
if f.alternate() {
f.flags |= 1 << (FlagV1::SignAwareZeroPad as u32);
if f.width.is_none() {
f.width = Some(((mem::size_of::<usize>() * 8) / 4) + 2);
}
}
f.flags |= 1 << (FlagV1::Alternate as u32);
let ret = LowerHex::fmt(&(*self as *const () as usize), f);
f.width = old_width;
f.flags = old_flags;
ret
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Pointer for *mut T {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
Pointer::fmt(&(*self as *const T), f)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Pointer for &T {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
Pointer::fmt(&(*self as *const T), f)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Pointer for &mut T {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
Pointer::fmt(&(&**self as *const T), f)
}
}
// Implementation of Display/Debug for various core types
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Debug for *const T {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
Pointer::fmt(self, f)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Debug for *mut T {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
Pointer::fmt(self, f)
}
}
macro_rules! peel {
($name:ident, $($other:ident,)*) => (tuple! { $($other,)* })
}
macro_rules! tuple {
() => ();
( $($name:ident,)+ ) => (
#[stable(feature = "rust1", since = "1.0.0")]
impl<$($name:Debug),+> Debug for ($($name,)+) where last_type!($($name,)+): ?Sized {
#[allow(non_snake_case, unused_assignments)]
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
let mut builder = f.debug_tuple("");
let ($(ref $name,)+) = *self;
$(
builder.field(&$name);
)+
builder.finish()
}
}
peel! { $($name,)+ }
)
}
macro_rules! last_type {
($a:ident,) => { $a };
($a:ident, $($rest_a:ident,)+) => { last_type!($($rest_a,)+) };
}
tuple! { T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, }
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Debug> Debug for [T] {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
f.debug_list().entries(self.iter()).finish()
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl Debug for () {
#[inline]
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
f.pad("()")
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Debug for PhantomData<T> {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
f.pad("PhantomData")
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Copy + Debug> Debug for Cell<T> {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
f.debug_struct("Cell").field("value", &self.get()).finish()
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + Debug> Debug for RefCell<T> {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
match self.try_borrow() {
Ok(borrow) => f.debug_struct("RefCell").field("value", &borrow).finish(),
Err(_) => {
// The RefCell is mutably borrowed so we can't look at its value
// here. Show a placeholder instead.
struct BorrowedPlaceholder;
impl Debug for BorrowedPlaceholder {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
f.write_str("<borrowed>")
}
}
f.debug_struct("RefCell").field("value", &BorrowedPlaceholder).finish()
}
}
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + Debug> Debug for Ref<'_, T> {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
Debug::fmt(&**self, f)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + Debug> Debug for RefMut<'_, T> {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
Debug::fmt(&*(self.deref()), f)
}
}
#[stable(feature = "core_impl_debug", since = "1.9.0")]
impl<T: ?Sized + Debug> Debug for UnsafeCell<T> {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
        f.pad("UnsafeCell")
    }
}

// If you expected tests to be here, look instead at the ui/ifmt.rs test,
// it's a lot easier than creating all of the rt::Piece structures here.
main.go | package main
import (
"archive/zip"
"bytes"
"compress/gzip"
"crypto/aes"
"crypto/cipher"
"crypto/rand"
"crypto/sha256"
"crypto/sha512"
"encoding/base64"
"encoding/hex"
"errors"
"flag"
"fmt"
"io"
"io/ioutil"
"log"
"net/http"
"os"
"os/exec"
"path/filepath"
"plugin"
"regexp"
"sort"
"strconv"
"strings"
"sync"
"time"
"github.com/gin-gonic/gin"
"github.com/google/uuid"
"github.com/otyg/threagile/macros"
add_build_pipeline "github.com/otyg/threagile/macros/built-in/add-build-pipeline"
add_vault "github.com/otyg/threagile/macros/built-in/add-vault"
pretty_print "github.com/otyg/threagile/macros/built-in/pretty-print"
remove_unused_tags "github.com/otyg/threagile/macros/built-in/remove-unused-tags"
seed_risk_tracking "github.com/otyg/threagile/macros/built-in/seed-risk-tracking"
seed_tags "github.com/otyg/threagile/macros/built-in/seed-tags"
"github.com/otyg/threagile/model"
"github.com/otyg/threagile/model/confidentiality"
"github.com/otyg/threagile/model/core"
"github.com/otyg/threagile/model/criticality"
"github.com/otyg/threagile/report"
"github.com/otyg/threagile/support"
"github.com/santhosh-tekuri/jsonschema/v5"
"golang.org/x/crypto/argon2"
"gopkg.in/yaml.v3"
)
const keepDiagramSourceFiles = false
const defaultGraphvizDPI, maxGraphvizDPI = 120, 240
const backupHistoryFilesToKeep = 50
const (
	baseFolder                             = "/data"
	reportFilename                         = "report.pdf"
	excelRisksFilename                     = "risks.xlsx"
	excelTagsFilename                      = "tags.xlsx"
	jsonRisksFilename                      = "risks.json"
	jsonTechnicalAssetsFilename            = "technical-assets.json"
	jsonStatsFilename                      = "stats.json"
	dataFlowDiagramFilenameDOT             = "data-flow-diagram.gv"
	dataFlowDiagramFilenamePNG             = "data-flow-diagram.png"
	dataAssetDiagramFilenameDOT            = "data-asset-diagram.gv"
	dataAssetDiagramFilenamePNG            = "data-asset-diagram.png"
	graphvizDataFlowDiagramConversionCall  = "render-data-flow-diagram.sh"
	graphvizDataAssetDiagramConversionCall = "render-data-asset-diagram.sh"
)
var globalLock sync.Mutex
var successCount, errorCount = 0, 0
var modelInput model.ModelInput
var drawSpaceLinesForLayoutUnfortunatelyFurtherSeparatesAllRanks = true
var buildTimestamp = ""
var modelFilename, templateFilename /*, diagramFilename, reportFilename, graphvizConversion*/ *string
var createExampleModel, createStubModel, createEditingSupport, verbose, ignoreOrphanedRiskTracking, generateDataFlowDiagram, generateDataAssetDiagram, generateRisksJSON, generateTechnicalAssetsJSON, generateStatsJSON, generateRisksExcel, generateTagsExcel, generateReportPDF, generateDefectdojoGeneric *bool
var outputDir, raaPlugin, skipRiskRules, riskRulesPlugins, executeModelMacro *string
var builtinRiskRulesPlugins map[string]model.RiskRule
var diagramDPI, serverPort *int
var deferredRiskTrackingDueToWildcardMatching = make(map[string]model.RiskTracking)
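// applyRiskGeneration executes every loaded risk rule plugin (minus those
// listed via -skip-risk-rules) and indexes the generated risks both by
// category and by their synthetic risk id.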
func applyRiskGeneration() {
if *verbose {
fmt.Println("Applying risk generation")
}
skippedRules := make(map[string]interface{})
if len(*skipRiskRules) > 0 {
for _, id := range strings.Split(*skipRiskRules, ",") {
skippedRules[id] = true
}
}
for id, riskPlugin := range builtinRiskRulesPlugins {
if _, ok := skippedRules[riskPlugin.Category().Id]; ok {
fmt.Println("Skipping risk rule:", id)
delete(skippedRules, id)
} else {
model.AddToListOfSupportedTags(riskPlugin.SupportedTags())
risks := riskPlugin.GenerateRisks()
if len(risks) > 0 {
model.GeneratedRisksByCategory[riskPlugin.Category()] = risks
}
}
}
if len(skippedRules) > 0 {
keys := make([]string, 0)
for k := range skippedRules {
keys = append(keys, k)
}
if len(keys) > 0 {
log.Println("Unknown risk rules to skip:", keys)
}
}
// save also in map keyed by synthetic risk-id
for _, category := range model.SortedRiskCategories() {
risks := model.SortedRisksOfCategory(category)
for _, risk := range risks {
model.GeneratedRisksBySyntheticId[strings.ToLower(risk.SyntheticId)] = risk
}
}
}
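// checkRiskTracking verifies that each risk tracking entry of the model refers
// to a generated risk; unknown references are either logged as a warning
// (-ignore-orphaned-risk-tracking) or abort the run.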
func checkRiskTracking() {
if *verbose {
fmt.Println("Checking risk tracking")
}
for _, tracking := range model.ParsedModelRoot.RiskTracking {
if _, ok := model.GeneratedRisksBySyntheticId[tracking.SyntheticRiskId]; !ok {
if *ignoreOrphanedRiskTracking {
fmt.Println("Risk tracking references unknown risk (risk id not found): " + tracking.SyntheticRiskId)
} else {
panic(errors.New("Risk tracking references unknown risk (risk id not found) - you might want to use the option -ignore-orphaned-risk-tracking: " + tracking.SyntheticRiskId +
"\n\nNOTE: For risk tracking each risk-id needs to be defined (the string with the @ sign in it). " +
"These unique risk IDs are visible in the PDF report (the small grey string under each risk), " +
"the Excel (column \"ID\"), as well as the JSON responses. Some risk IDs have only one @ sign in them, " +
"while others multiple. The idea is to allow for unique but still speaking IDs. Therefore each risk instance " +
"creates its individual ID by taking all affected elements causing the risk to be within an @-delimited part. " +
"Using wildcards (the * sign) for parts delimited by @ signs allows to handle groups of certain risks at once. " +
"Best is to lookup the IDs to use in the created Excel file. Alternatively a model macro \"seed-risk-tracking\" " +
"is available that helps in initially seeding the risk tracking part here based on already identified and not yet handled risks."))
}
}
}
// save also the risk-category-id and risk-status directly in the risk for better JSON marshalling
	for category := range model.GeneratedRisksByCategory {
		for i := range model.GeneratedRisksByCategory[category] {
model.GeneratedRisksByCategory[category][i].CategoryId = category.Id
model.GeneratedRisksByCategory[category][i].RiskStatus = model.GeneratedRisksByCategory[category][i].GetRiskTrackingStatusDefaultingUnchecked()
}
}
}
// === Error handling stuff ========================================
func main() {
parseCommandlineArgs()
if *serverPort > 0 {
startServer()
} else {
doIt(*modelFilename, *outputDir)
}
}
// Unzip will decompress a zip archive, moving all files and folders
// within the zip file (parameter 1) to an output directory (parameter 2).
func unzip(src string, dest string) ([]string, error) {
var filenames []string
r, err := zip.OpenReader(src)
if err != nil {
return filenames, err
}
defer r.Close()
for _, f := range r.File {
// Store filename/path for returning and using later on
fpath := filepath.Join(dest, f.Name)
// Check for ZipSlip. More Info: http://bit.ly/2MsjAWE
if !strings.HasPrefix(fpath, filepath.Clean(dest)+string(os.PathSeparator)) {
return filenames, fmt.Errorf("%s: illegal file path", fpath)
}
filenames = append(filenames, fpath)
if f.FileInfo().IsDir() {
// Make Folder
os.MkdirAll(fpath, os.ModePerm)
continue
}
// Make File
if err = os.MkdirAll(filepath.Dir(fpath), os.ModePerm); err != nil {
return filenames, err
}
outFile, err := os.OpenFile(fpath, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, f.Mode())
if err != nil {
return filenames, err
}
rc, err := f.Open()
if err != nil {
return filenames, err
}
_, err = io.Copy(outFile, rc)
// Close the file without defer to close before next iteration of loop
outFile.Close()
rc.Close()
if err != nil {
return filenames, err
}
}
return filenames, nil
}
// ZipFiles compresses one or many files into a single zip archive file.
// Param 1: filename is the output zip file's name.
// Param 2: files is a list of files to add to the zip.
func zipFiles(filename string, files []string) error {
newZipFile, err := os.Create(filename)
if err != nil {
return err
}
defer newZipFile.Close()
zipWriter := zip.NewWriter(newZipFile)
defer zipWriter.Close()
// Add files to zip
for _, file := range files {
if err = addFileToZip(zipWriter, file); err != nil {
return err
}
}
return nil
}
func addFileToZip(zipWriter *zip.Writer, filename string) error {
fileToZip, err := os.Open(filename)
if err != nil {
return err
}
defer fileToZip.Close()
// Get the file information
info, err := fileToZip.Stat()
if err != nil {
return err
}
header, err := zip.FileInfoHeader(info)
if err != nil {
return err
}
// Using FileInfoHeader() above only uses the basename of the file. If we want
// to preserve the folder structure we can overwrite this with the full path.
//header.Name = filename
// Change to deflate to gain better compression
// see http://golang.org/pkg/archive/zip/#pkg-constants
header.Method = zip.Deflate
writer, err := zipWriter.CreateHeader(header)
if err != nil {
return err
}
_, err = io.Copy(writer, fileToZip)
return err
}
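// doIt parses the given model file, applies RAA and all risk rules, and writes
// the requested artifacts (diagrams, JSON, Excel, PDF) into the output directory.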
func doIt(inputFilename string, outputDirectory string) {
defer func() {
var err error
if r := recover(); r != nil {
err = r.(error)
if *verbose {
log.Println(err)
}
os.Stderr.WriteString(err.Error() + "\n")
os.Exit(2)
}
}()
if len(*executeModelMacro) > 0 {
printLogo()
} else {
if *verbose {
fmt.Println("Writing into output directory:", outputDirectory)
}
}
model.Init()
parseModel(inputFilename)
introTextRAA := applyRAA()
loadRiskRulePlugins()
applyRiskGeneration()
applyWildcardRiskTrackingEvaluation()
checkRiskTracking()
if len(*executeModelMacro) > 0 {
macros.ExecuteModelMacro(executeModelMacro, modelInput, inputFilename)
}
renderDataFlowDiagram, renderDataAssetDiagram, renderRisksJSON, renderTechnicalAssetsJSON, renderStatsJSON, renderRisksExcel, renderTagsExcel, renderPDF, renderDefectDojo := *generateDataFlowDiagram, *generateDataAssetDiagram, *generateRisksJSON, *generateTechnicalAssetsJSON, *generateStatsJSON, *generateRisksExcel, *generateTagsExcel, *generateReportPDF, *generateDefectdojoGeneric
if renderPDF { // as the PDF report includes both diagrams
renderDataFlowDiagram, renderDataAssetDiagram = true, true
}
// Data-flow Diagram rendering
if renderDataFlowDiagram {
report.RenderDataFlowDiagram(outputDirectory, keepDiagramSourceFiles, diagramDPI, verbose)
}
// Data Asset Diagram rendering
if renderDataAssetDiagram {
report.RenderDataAssetDiagram(outputDirectory, dataAssetDiagramFilenameDOT, keepDiagramSourceFiles, diagramDPI, verbose)
}
if renderDefectDojo {
if *verbose {
fmt.Println("Writing risks defectdojo generic json")
}
report.WriteDefectdojoGeneric(outputDirectory + "/defectdojo.json")
report.WriteOpenSarif(outputDirectory + "/risks.sarif")
}
// risks as risks json
if renderRisksJSON {
if *verbose {
fmt.Println("Writing risks json")
}
report.WriteRisksJSON(outputDirectory + "/" + jsonRisksFilename)
}
// technical assets json
if renderTechnicalAssetsJSON {
if *verbose {
fmt.Println("Writing technical assets json")
}
report.WriteTechnicalAssetsJSON(outputDirectory + "/" + jsonTechnicalAssetsFilename)
}
// risks as risks json
if renderStatsJSON {
if *verbose {
fmt.Println("Writing stats json")
}
report.WriteStatsJSON(outputDirectory + "/" + jsonStatsFilename)
}
// risks Excel
if renderRisksExcel {
if *verbose {
fmt.Println("Writing risks excel")
}
report.WriteRisksExcelToFile(outputDirectory + "/" + excelRisksFilename)
}
// tags Excel
if renderTagsExcel {
if *verbose {
fmt.Println("Writing tags excel")
}
report.WriteTagsExcelToFile(outputDirectory + "/" + excelTagsFilename)
}
if renderPDF {
// hash the YAML input file
f, err := os.Open(inputFilename)
support.CheckErr(err)
defer f.Close()
hasher := sha256.New()
if _, err := io.Copy(hasher, f); err != nil {
panic(err)
}
modelHash := hex.EncodeToString(hasher.Sum(nil))
// report PDF
if *verbose {
fmt.Println("Writing report pdf")
}
report.WriteReportPDF(outputDirectory+"/"+reportFilename,
*templateFilename,
outputDirectory+"/"+dataFlowDiagramFilenamePNG,
outputDirectory+"/"+dataAssetDiagramFilenamePNG,
inputFilename,
*skipRiskRules,
buildTimestamp,
modelHash,
introTextRAA,
builtinRiskRulesPlugins)
}
}
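// applyRAA loads the configured RAA plugin (.so file) and invokes its exported
// CalculateRAA function; the returned string is used as intro text in the report.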
func applyRAA() string {
if *verbose {
fmt.Println("Applying RAA calculation:", *raaPlugin)
}
// determine plugin to load
// load plugin: open the ".so" file to load the symbols
plug, err := plugin.Open(*raaPlugin)
support.CheckErr(err)
// look up a symbol (an exported function or variable): in this case, function CalculateRAA
symCalculateRAA, err := plug.Lookup("CalculateRAA")
support.CheckErr(err)
// use the plugin
raaCalcFunc, ok := symCalculateRAA.(func() string) // symCalculateRAA.(func(model.ParsedModel) string)
if !ok {
panic(errors.New("RAA plugin has no 'CalculateRAA() string' function"))
}
// call it
return raaCalcFunc()
}
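// loadRiskRulePlugins opens every shared object below risk-plugins/ and
// registers its exported 'RiskRule' variable, keyed by the rule's category id.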
func loadRiskRulePlugins() {
builtinRiskRulesPlugins = make(map[string]model.RiskRule)
pluginFiles, err := filepath.Glob("risk-plugins/*.so")
if err != nil {
panic(errors.New(err.Error()))
}
for _, pluginFile := range pluginFiles {
_, err := os.Stat(pluginFile)
if os.IsNotExist(err) {
log.Fatal("Risk rule implementation file not found: ", pluginFile)
}
plug, err := plugin.Open(pluginFile)
support.CheckErr(err)
// look up a symbol (an exported function or variable): in this case variable CustomRiskRule
symRiskRule, err := plug.Lookup("RiskRule")
support.CheckErr(err)
// register the risk rule plugin for later use: in this case interface type model.RiskRule (defined above)
symRiskRuleVar, ok := symRiskRule.(model.RiskRule)
if !ok {
panic(errors.New("Risk rule plugin has no 'RiskRule' variable" + symRiskRuleVar.Category().Id))
}
// simply add to a map (just convenience) where key is the category id and value the rule's execution function
ruleID := symRiskRuleVar.Category().Id
builtinRiskRulesPlugins[ruleID] = symRiskRuleVar
if *verbose {
fmt.Println("Risk rule loaded:", ruleID)
}
}
}
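// analyze runs a full analysis for an uploaded model and streams back the result archive.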
func analyze(context *gin.Context) {
execute(context, false)
}
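// check only parses and validates an uploaded model without generating any artifacts.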
func check(context *gin.Context) {
_, ok := execute(context, true)
if ok {
context.JSON(http.StatusOK, gin.H{
"message": "model is ok",
})
}
}
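// execute receives an uploaded model (plain YAML or a ZIP containing one),
// analyzes it in a separate sub-process and, unless dryRun is set, streams
// back a ZIP archive with all generated artifacts.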
func execute(context *gin.Context, dryRun bool) (yamlContent []byte, ok bool) {
defer func() {
var err error
if r := recover(); r != nil {
errorCount++
err = r.(error)
log.Println(err)
context.JSON(http.StatusBadRequest, gin.H{
"error": strings.TrimSpace(err.Error()),
})
ok = false
}
}()
dpi, err := strconv.Atoi(context.DefaultQuery("dpi", strconv.Itoa(defaultGraphvizDPI)))
support.CheckErr(err)
fileUploaded, header, err := context.Request.FormFile("file")
support.CheckErr(err)
if header.Size > 50000000 {
msg := "maximum model upload file size exceeded (denial-of-service protection)"
log.Println(msg)
context.JSON(http.StatusRequestEntityTooLarge, gin.H{
"error": msg,
})
return yamlContent, false
}
filenameUploaded := strings.TrimSpace(header.Filename)
tmpInputDir, err := ioutil.TempDir(model.TempFolder, "threagile-input-")
support.CheckErr(err)
defer os.RemoveAll(tmpInputDir)
tmpModelFile, err := ioutil.TempFile(tmpInputDir, "threagile-model-*")
support.CheckErr(err)
defer os.Remove(tmpModelFile.Name())
_, err = io.Copy(tmpModelFile, fileUploaded)
support.CheckErr(err)
yamlFile := tmpModelFile.Name()
if strings.ToLower(filepath.Ext(filenameUploaded)) == ".zip" {
// unzip first (including the resources like images etc.)
if *verbose {
fmt.Println("Decompressing uploaded archive")
}
filenamesUnzipped, err := unzip(tmpModelFile.Name(), tmpInputDir)
support.CheckErr(err)
found := false
for _, name := range filenamesUnzipped {
if strings.ToLower(filepath.Ext(name)) == ".yaml" {
yamlFile = name
found = true
break
}
}
if !found {
panic(errors.New("no yaml file found in uploaded archive"))
}
}
tmpOutputDir, err := ioutil.TempDir(model.TempFolder, "threagile-output-")
support.CheckErr(err)
defer os.RemoveAll(tmpOutputDir)
tmpResultFile, err := ioutil.TempFile(model.TempFolder, "threagile-result-*.zip")
support.CheckErr(err)
defer os.Remove(tmpResultFile.Name())
if dryRun {
doItViaRuntimeCall(yamlFile, tmpOutputDir, *executeModelMacro, *raaPlugin, *skipRiskRules, *ignoreOrphanedRiskTracking, false, false, false, false, false, true, true, true, true, 40)
} else {
doItViaRuntimeCall(yamlFile, tmpOutputDir, *executeModelMacro, *raaPlugin, *skipRiskRules, *ignoreOrphanedRiskTracking, true, true, true, true, true, true, true, true, true, dpi)
}
support.CheckErr(err)
yamlContent, err = ioutil.ReadFile(yamlFile)
support.CheckErr(err)
err = ioutil.WriteFile(tmpOutputDir+"/threagile.yaml", yamlContent, 0400)
support.CheckErr(err)
if !dryRun {
files := []string{
tmpOutputDir + "/threagile.yaml",
tmpOutputDir + "/" + dataFlowDiagramFilenamePNG,
tmpOutputDir + "/" + dataAssetDiagramFilenamePNG,
tmpOutputDir + "/" + reportFilename,
tmpOutputDir + "/" + excelRisksFilename,
tmpOutputDir + "/" + excelTagsFilename,
tmpOutputDir + "/" + jsonRisksFilename,
tmpOutputDir + "/" + jsonTechnicalAssetsFilename,
tmpOutputDir + "/" + jsonStatsFilename,
}
if keepDiagramSourceFiles {
files = append(files, tmpOutputDir+"/"+dataFlowDiagramFilenameDOT)
files = append(files, tmpOutputDir+"/"+dataAssetDiagramFilenameDOT)
}
err = zipFiles(tmpResultFile.Name(), files)
support.CheckErr(err)
if *verbose {
log.Println("Streaming back result file: " + tmpResultFile.Name())
}
context.FileAttachment(tmpResultFile.Name(), "threagile-result.zip")
}
successCount++
return yamlContent, true
}
// ultimately to avoid any in-process memory and/or data leaks by the used third party libs like PDF generation: exec and quit
func doItViaRuntimeCall(modelFile string, outputDir string, executeModelMacro string, raaPlugin string, skipRiskRules string, ignoreOrphanedRiskTracking bool,
generateDataFlowDiagram, generateDataAssetDiagram, generateReportPdf, generateRisksExcel, generateTagsExcel, generateRisksJSON, generateTechnicalAssetsJSON, generateDefectdojo, generateStatsJSON bool,
dpi int) {
// Remember to also add the same args to the exec based sub-process calls!
var cmd *exec.Cmd
args := []string{"-model", modelFile, "-output", outputDir, "-execute-model-macro", executeModelMacro, "-raa-plugin", raaPlugin, "-skip-risk-rules", skipRiskRules, "-diagram-dpi", strconv.Itoa(dpi)}
if *verbose {
args = append(args, "-verbose")
}
	if ignoreOrphanedRiskTracking { // TODO why pass all of them as arguments when they are also variables at the outer level?
args = append(args, "-ignore-orphaned-risk-tracking")
}
if generateDataFlowDiagram {
args = append(args, "-generate-data-flow-diagram")
}
if generateDataAssetDiagram {
args = append(args, "-generate-data-asset-diagram")
}
if generateReportPdf {
args = append(args, "-generate-report-pdf")
}
if generateDefectdojo {
args = append(args, "-generate-defectdojo-json")
}
if generateRisksExcel {
args = append(args, "-generate-risks-excel")
}
if generateTagsExcel {
args = append(args, "-generate-tags-excel")
}
if generateRisksJSON {
args = append(args, "-generate-risks-json")
}
if generateTechnicalAssetsJSON {
args = append(args, "-generate-technical-assets-json")
}
if generateStatsJSON {
args = append(args, "-generate-stats-json")
}
self := os.Args[0]
cmd = exec.Command(self, args...)
out, err := cmd.CombinedOutput()
if err != nil {
panic(errors.New(string(out)))
} else {
if *verbose && len(out) > 0 {
fmt.Println("---")
fmt.Print(string(out))
fmt.Println("---")
}
}
}
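// startServer wires up the REST API, static assets and model endpoints of the
// Threagile server mode and starts listening on the configured port.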
func startServer() {
router := gin.Default()
router.LoadHTMLGlob("server/static/*.html")
router.GET("/", func(c *gin.Context) {
c.HTML(http.StatusOK, "index.html", gin.H{})
})
router.HEAD("/", func(c *gin.Context) {
c.HTML(http.StatusOK, "index.html", gin.H{})
})
router.StaticFile("/threagile.png", "server/static/threagile.png")
router.StaticFile("/site.webmanifest", "server/static/site.webmanifest")
router.StaticFile("/favicon.ico", "server/static/favicon.ico")
router.StaticFile("/favicon-32x32.png", "server/static/favicon-32x32.png")
router.StaticFile("/favicon-16x16.png", "server/static/favicon-16x16.png")
router.StaticFile("/apple-touch-icon.png", "server/static/apple-touch-icon.png")
router.StaticFile("/android-chrome-512x512.png", "server/static/android-chrome-512x512.png")
router.StaticFile("/android-chrome-192x192.png", "server/static/android-chrome-192x192.png")
router.StaticFile("/schema.json", "schema.json")
router.StaticFile("/live-templates.txt", "live-templates.txt")
router.StaticFile("/openapi.yaml", "openapi.yaml")
router.StaticFile("/swagger-ui/", "server/static/swagger-ui/index.html")
router.StaticFile("/swagger-ui/index.html", "server/static/swagger-ui/index.html")
router.StaticFile("/swagger-ui/oauth2-redirect.html", "server/static/swagger-ui/oauth2-redirect.html")
router.StaticFile("/swagger-ui/swagger-ui.css", "server/static/swagger-ui/swagger-ui.css")
router.StaticFile("/swagger-ui/swagger-ui.js", "server/static/swagger-ui/swagger-ui.js")
router.StaticFile("/swagger-ui/swagger-ui-bundle.js", "server/static/swagger-ui/swagger-ui-bundle.js")
router.StaticFile("/swagger-ui/swagger-ui-standalone-preset.js", "server/static/swagger-ui/swagger-ui-standalone-preset.js")
router.GET("/threagile-example-model.yaml", exampleFile)
router.GET("/threagile-stub-model.yaml", stubFile)
router.GET("/meta/ping", func(c *gin.Context) {
c.JSON(200, gin.H{
"message": "pong",
})
})
router.GET("/meta/version", func(c *gin.Context) {
c.JSON(200, gin.H{
"version": model.ThreagileVersion,
"build_timestamp": buildTimestamp,
})
})
router.GET("/meta/types", func(c *gin.Context) {
c.JSON(200, gin.H{
"quantity": arrayOfStringValues(model.QuantityValues()),
"confidentiality": arrayOfStringValues(confidentiality.ConfidentialityValues()),
"criticality": arrayOfStringValues(criticality.CriticalityValues()),
"technical_asset_type": arrayOfStringValues(model.TechnicalAssetTypeValues()),
"technical_asset_size": arrayOfStringValues(model.TechnicalAssetSizeValues()),
"authorization": arrayOfStringValues(model.AuthorizationValues()),
"authentication": arrayOfStringValues(model.AuthenticationValues()),
"usage": arrayOfStringValues(model.UsageValues()),
"encryption": arrayOfStringValues(model.EncryptionStyleValues()),
"data_format": arrayOfStringValues(model.DataFormatValues()),
"protocol": arrayOfStringValues(model.ProtocolValues()),
"technical_asset_technology": arrayOfStringValues(model.TechnicalAssetTechnologyValues()),
"technical_asset_machine": arrayOfStringValues(model.TechnicalAssetMachineValues()),
"trust_boundary_type": arrayOfStringValues(model.TrustBoundaryTypeValues()),
"data_breach_probability": arrayOfStringValues(model.DataBreachProbabilityValues()),
"risk_severity": arrayOfStringValues(model.RiskSeverityValues()),
"risk_exploitation_likelihood": arrayOfStringValues(model.RiskExploitationLikelihoodValues()),
"risk_exploitation_impact": arrayOfStringValues(model.RiskExploitationImpactValues()),
"risk_function": arrayOfStringValues(model.RiskFunctionValues()),
"risk_status": arrayOfStringValues(model.RiskStatusValues()),
"stride": arrayOfStringValues(model.STRIDEValues()),
})
})
// TODO router.GET("/meta/risk-rules", listRiskRules)
// TODO router.GET("/meta/model-macros", listModelMacros)
router.GET("/meta/stats", stats)
router.POST("/direct/analyze", analyze)
router.POST("/direct/check", check)
router.GET("/direct/stub", stubFile)
router.POST("/auth/keys", createKey)
router.DELETE("/auth/keys", deleteKey)
router.POST("/auth/tokens", createToken)
router.DELETE("/auth/tokens", deleteToken)
router.POST("/models", createNewModel)
router.GET("/models", listModels)
router.DELETE("/models/:model-id", deleteModel)
router.GET("/models/:model-id", getModel)
router.PUT("/models/:model-id", importModel)
router.GET("/models/:model-id/data-flow-diagram", streamDataFlowDiagram)
router.GET("/models/:model-id/data-asset-diagram", streamDataAssetDiagram)
router.GET("/models/:model-id/report-pdf", streamReportPDF)
router.GET("/models/:model-id/risks-excel", streamRisksExcel)
router.GET("/models/:model-id/tags-excel", streamTagsExcel)
router.GET("/models/:model-id/risks", streamRisksJSON)
router.GET("/models/:model-id/technical-assets", streamTechnicalAssetsJSON)
router.GET("/models/:model-id/stats", streamStatsJSON)
router.GET("/models/:model-id/analysis", analyzeModelOnServerDirectly)
router.GET("/models/:model-id/cover", getCover)
router.PUT("/models/:model-id/cover", setCover)
router.GET("/models/:model-id/overview", getOverview)
router.PUT("/models/:model-id/overview", setOverview)
//router.GET("/models/:model-id/questions", getQuestions)
//router.PUT("/models/:model-id/questions", setQuestions)
router.GET("/models/:model-id/abuse-cases", getAbuseCases)
router.PUT("/models/:model-id/abuse-cases", setAbuseCases)
router.GET("/models/:model-id/security-requirements", getSecurityRequirements)
router.PUT("/models/:model-id/security-requirements", setSecurityRequirements)
//router.GET("/models/:model-id/tags", getTags)
//router.PUT("/models/:model-id/tags", setTags)
router.GET("/models/:model-id/data-assets", getDataAssets)
router.POST("/models/:model-id/data-assets", createNewDataAsset)
router.GET("/models/:model-id/data-assets/:data-asset-id", getDataAsset)
router.PUT("/models/:model-id/data-assets/:data-asset-id", setDataAsset)
router.DELETE("/models/:model-id/data-assets/:data-asset-id", deleteDataAsset)
router.GET("/models/:model-id/trust-boundaries", getTrustBoundaries)
// router.POST("/models/:model-id/trust-boundaries", createNewTrustBoundary)
// router.GET("/models/:model-id/trust-boundaries/:trust-boundary-id", getTrustBoundary)
// router.PUT("/models/:model-id/trust-boundaries/:trust-boundary-id", setTrustBoundary)
// router.DELETE("/models/:model-id/trust-boundaries/:trust-boundary-id", deleteTrustBoundary)
router.GET("/models/:model-id/shared-runtimes", getSharedRuntimes)
router.POST("/models/:model-id/shared-runtimes", createNewSharedRuntime)
router.GET("/models/:model-id/shared-runtimes/:shared-runtime-id", getSharedRuntime)
router.PUT("/models/:model-id/shared-runtimes/:shared-runtime-id", setSharedRuntime)
router.DELETE("/models/:model-id/shared-runtimes/:shared-runtime-id", deleteSharedRuntime)
fmt.Println("Threagile server running...")
router.Run(":" + strconv.Itoa(*serverPort)) // listen and serve on 0.0.0.0:8080 or whatever port was specified
}
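// Typical REST flow against the server above (an illustrative sketch: host and port are
// examples, while the paths, header names and JSON fields are the ones used by the
// handlers in this file):
//
//   # 1. create a key (a one-time secret that also names the server-side storage folder):
//   curl -X POST http://localhost:8080/auth/keys
//   # -> {"key": "<base64url-key>"}
//
//   # 2. exchange the key for a timeout-limited token:
//   curl -X POST -H "key: <base64url-key>" http://localhost:8080/auth/tokens
//   # -> {"token": "<base64url-token>"}
//
//   # 3. use the token for all model operations, e.g. create a new model:
//   curl -X POST -H "token: <base64url-token>" http://localhost:8080/models
//   # -> {"message": "model created", "id": "<uuid>"}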
func exampleFile(context *gin.Context) {
example, err := ioutil.ReadFile("/app/threagile-example-model.yaml")
support.CheckErr(err)
context.Data(http.StatusOK, gin.MIMEYAML, example)
}
func stubFile(context *gin.Context) {
stub, err := ioutil.ReadFile("/app/threagile-stub-model.yaml")
support.CheckErr(err)
context.Data(http.StatusOK, gin.MIMEYAML, addSupportedTags(stub)) // TODO use also the MIMEYAML way of serving YAML in model export?
}
func addSupportedTags(input []byte) []byte {
// add distinct tags as "tags_available"
supportedTags := make(map[string]bool)
for _, riskRule := range builtinRiskRulesPlugins {
for _, tag := range riskRule.SupportedTags() {
supportedTags[strings.ToLower(tag)] = true
}
}
tags := make([]string, 0, len(supportedTags))
for t := range supportedTags {
tags = append(tags, t)
}
if len(tags) == 0 {
return input
}
sort.Strings(tags)
if *verbose {
fmt.Print("Supported tags of all risk rules: ")
for i, tag := range tags {
if i > 0 {
fmt.Print(", ")
}
fmt.Print(tag)
}
fmt.Println()
}
replacement := "tags_available:"
for _, tag := range tags {
replacement += "\n - " + tag
}
return []byte(strings.Replace(string(input), "tags_available:", replacement, 1))
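// Illustrative sketch of addSupportedTags (the tag names are made up here; real ones come
// from the loaded risk rules): a stub line "tags_available:" becomes
//
//   tags_available:
//     - docker
//     - kubernetes
//
// Only the first occurrence of "tags_available:" is replaced (strings.Replace with n=1).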
}
const keySize = 32
type timeoutStruct struct {
xorRand []byte
createdNanotime, lastAcessedNanotime int64
}
var mapTokenHashToTimeoutStruct = make(map[string]timeoutStruct)
var mapFolderNameToTokenHash = make(map[string]string)
func createToken(context *gin.Context) {
folderName, key, ok := checkKeyToFolderName(context)
if !ok {
return
}
globalLock.Lock()
defer globalLock.Unlock()
if tokenHash, exists := mapFolderNameToTokenHash[folderName]; exists {
// invalidate previous token
delete(mapTokenHashToTimeoutStruct, tokenHash)
}
// create a strong random 256 bit value (used to xor)
xorBytesArr := make([]byte, keySize)
n, err := rand.Read(xorBytesArr[:])
if n != keySize || err != nil {
log.Println(err)
context.JSON(http.StatusInternalServerError, gin.H{
"error": "unable to create token",
})
return
}
now := time.Now().UnixNano()
token := xor(key, xorBytesArr)
tokenHash := hashSHA512(token)
housekeepingTokenMaps()
mapTokenHashToTimeoutStruct[tokenHash] = timeoutStruct{
xorRand: xorBytesArr,
createdNanotime: now,
lastAccessedNanotime: now,
}
mapFolderNameToTokenHash[folderName] = tokenHash
context.JSON(http.StatusCreated, gin.H{
"token": base64.RawURLEncoding.EncodeToString(token[:]),
})
}
func deleteToken(context *gin.Context) {
header := tokenHeader{}
if err := context.ShouldBindHeader(&header); err != nil {
context.JSON(http.StatusNotFound, gin.H{
"error": "token not found",
})
return
}
token, err := base64.RawURLEncoding.DecodeString(strings.TrimSpace(header.Token))
if len(token) == 0 || err != nil {
if err != nil {
log.Println(err)
}
context.JSON(http.StatusNotFound, gin.H{
"error": "token not found",
})
return
}
globalLock.Lock()
defer globalLock.Unlock()
deleteTokenHashFromMaps(hashSHA512(token))
context.JSON(http.StatusOK, gin.H{
"message": "token deleted",
})
}
const extremeShortTimeoutsForTesting = false
func housekeepingTokenMaps() {
now := time.Now().UnixNano()
for tokenHash, val := range mapTokenHashToTimeoutStruct {
if extremeShortTimeoutsForTesting {
// remove all elements older than 1 minute (= 60000000000 ns) soft
// and all elements older than 3 minutes (= 180000000000 ns) hard
if now-val.lastAccessedNanotime > 60000000000 || now-val.createdNanotime > 180000000000 {
fmt.Println("About to remove a token hash from maps")
deleteTokenHashFromMaps(tokenHash)
}
} else {
// remove all elements older than 30 minutes (= 1800000000000 ns) soft
// and all elements older than 10 hours (= 36000000000000 ns) hard
if now-val.lastAccessedNanotime > 1800000000000 || now-val.createdNanotime > 36000000000000 {
deleteTokenHashFromMaps(tokenHash)
}
}
}
}
func deleteTokenHashFromMaps(tokenHash string) {
delete(mapTokenHashToTimeoutStruct, tokenHash)
for folderName, check := range mapFolderNameToTokenHash {
if check == tokenHash {
delete(mapFolderNameToTokenHash, folderName)
break
}
}
}
func xor(key []byte, xor []byte) []byte {
if len(key) != len(xor) {
panic(errors.New("key length not matching XOR length"))
}
result := make([]byte, len(xor))
for i, b := range key {
result[i] = b ^ xor[i]
}
return result
}
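// Minimal sketch of the round-trip property that createToken and checkTokenToFolderName
// rely on: token = key XOR r and key = token XOR r, so applying the same random bytes
// twice restores the original key. Everything below is illustrative only.
func exampleXorRoundTrip() {
key := make([]byte, keySize)
r := make([]byte, keySize)
if _, err := rand.Read(key); err != nil {
panic(err)
}
if _, err := rand.Read(r); err != nil {
panic(err)
}
token := xor(key, r) // what the client receives
recovered := xor(token, r) // what the server re-derives on each request
if !bytes.Equal(recovered, key) {
panic(errors.New("XOR round-trip failed"))
}
}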
func analyzeModelOnServerDirectly(context *gin.Context) {
folderNameOfKey, key, ok := checkTokenToFolderName(context)
if !ok {
return
}
lockFolder(folderNameOfKey)
defer func() {
unlockFolder(folderNameOfKey)
if r := recover(); r != nil {
err, isError := r.(error)
if !isError {
err = fmt.Errorf("%v", r)
}
log.Println(err)
context.JSON(http.StatusBadRequest, gin.H{
"error": strings.TrimSpace(err.Error()),
})
}
}()
dpi, err := strconv.Atoi(context.DefaultQuery("dpi", strconv.Itoa(defaultGraphvizDPI)))
if err != nil {
handleErrorInServiceCall(err, context)
return
}
_, yamlText, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey)
if !ok {
return
}
tmpModelFile, err := ioutil.TempFile(model.TempFolder, "threagile-direct-analyze-*")
if err != nil {
handleErrorInServiceCall(err, context)
return
}
defer os.Remove(tmpModelFile.Name())
tmpOutputDir, err := ioutil.TempDir(model.TempFolder, "threagile-direct-analyze-")
if err != nil {
handleErrorInServiceCall(err, context)
return
}
defer os.RemoveAll(tmpOutputDir)
tmpResultFile, err := ioutil.TempFile(model.TempFolder, "threagile-result-*.zip")
support.CheckErr(err)
defer os.Remove(tmpResultFile.Name())
err = ioutil.WriteFile(tmpModelFile.Name(), []byte(yamlText), 0400)
if err != nil {
handleErrorInServiceCall(err, context)
return
}
doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, *executeModelMacro, *raaPlugin, *skipRiskRules, *ignoreOrphanedRiskTracking, true, true, true, true, true, true, true, true, true, dpi)
err = ioutil.WriteFile(tmpOutputDir+"/threagile.yaml", []byte(yamlText), 0400)
if err != nil {
handleErrorInServiceCall(err, context)
return
}
files := []string{
tmpOutputDir + "/threagile.yaml",
tmpOutputDir + "/" + dataFlowDiagramFilenamePNG,
tmpOutputDir + "/" + dataAssetDiagramFilenamePNG,
tmpOutputDir + "/" + reportFilename,
tmpOutputDir + "/" + excelRisksFilename,
tmpOutputDir + "/" + excelTagsFilename,
tmpOutputDir + "/" + jsonRisksFilename,
tmpOutputDir + "/" + jsonTechnicalAssetsFilename,
tmpOutputDir + "/" + jsonStatsFilename,
}
if keepDiagramSourceFiles {
files = append(files, tmpOutputDir+"/"+dataFlowDiagramFilenameDOT)
files = append(files, tmpOutputDir+"/"+dataAssetDiagramFilenameDOT)
}
err = zipFiles(tmpResultFile.Name(), files)
support.CheckErr(err)
if *verbose {
fmt.Println("Streaming back result file: " + tmpResultFile.Name())
}
context.FileAttachment(tmpResultFile.Name(), "threagile-result.zip")
}
type responseType int
const (
dataFlowDiagram responseType = iota
dataAssetDiagram
reportPDF
risksExcel
tagsExcel
risksJSON
technicalAssetsJSON
statsJSON
)
func streamDataFlowDiagram(context *gin.Context) {
streamResponse(context, dataFlowDiagram)
}
func streamDataAssetDiagram(context *gin.Context) {
streamResponse(context, dataAssetDiagram)
}
func streamReportPDF(context *gin.Context) {
streamResponse(context, reportPDF)
}
func streamRisksExcel(context *gin.Context) {
streamResponse(context, risksExcel)
}
func streamTagsExcel(context *gin.Context) {
streamResponse(context, tagsExcel)
}
func streamRisksJSON(context *gin.Context) {
streamResponse(context, risksJSON)
}
func streamTechnicalAssetsJSON(context *gin.Context) {
streamResponse(context, technicalAssetsJSON)
}
func streamStatsJSON(context *gin.Context) {
streamResponse(context, statsJSON)
}
func streamResponse(context *gin.Context, responseType responseType) {
folderNameOfKey, key, ok := checkTokenToFolderName(context)
if !ok {
return
}
lockFolder(folderNameOfKey)
defer func() {
unlockFolder(folderNameOfKey)
if r := recover(); r != nil {
err, isError := r.(error)
if !isError {
err = fmt.Errorf("%v", r)
}
log.Println(err)
context.JSON(http.StatusBadRequest, gin.H{
"error": strings.TrimSpace(err.Error()),
})
}
}()
dpi, err := strconv.Atoi(context.DefaultQuery("dpi", strconv.Itoa(defaultGraphvizDPI)))
if err != nil {
handleErrorInServiceCall(err, context)
return
}
_, yamlText, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey)
if !ok {
return
}
tmpModelFile, err := ioutil.TempFile(model.TempFolder, "threagile-render-*")
if err != nil {
handleErrorInServiceCall(err, context)
return
}
defer os.Remove(tmpModelFile.Name())
tmpOutputDir, err := ioutil.TempDir(model.TempFolder, "threagile-render-")
if err != nil {
handleErrorInServiceCall(err, context)
return
}
defer os.RemoveAll(tmpOutputDir)
err = ioutil.WriteFile(tmpModelFile.Name(), []byte(yamlText), 0400)
if err != nil {
handleErrorInServiceCall(err, context)
return
}
if responseType == dataFlowDiagram {
doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, *executeModelMacro, *raaPlugin, *skipRiskRules, *ignoreOrphanedRiskTracking, true, false, false, false, false, false, false, false, false, dpi)
if err != nil {
handleErrorInServiceCall(err, context)
return
}
context.File(tmpOutputDir + "/" + dataFlowDiagramFilenamePNG)
} else if responseType == dataAssetDiagram {
doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, *executeModelMacro, *raaPlugin, *skipRiskRules, *ignoreOrphanedRiskTracking, false, true, false, false, false, false, false, false, false, dpi)
if err != nil {
handleErrorInServiceCall(err, context)
return
}
context.File(tmpOutputDir + "/" + dataAssetDiagramFilenamePNG)
} else if responseType == reportPDF {
doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, *executeModelMacro, *raaPlugin, *skipRiskRules, *ignoreOrphanedRiskTracking, false, false, true, false, false, false, false, false, false, dpi)
if err != nil {
handleErrorInServiceCall(err, context)
return
}
context.FileAttachment(tmpOutputDir+"/"+reportFilename, reportFilename)
} else if responseType == risksExcel {
doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, *executeModelMacro, *raaPlugin, *skipRiskRules, *ignoreOrphanedRiskTracking, false, false, false, true, false, false, false, false, false, dpi)
if err != nil {
handleErrorInServiceCall(err, context)
return
}
context.FileAttachment(tmpOutputDir+"/"+excelRisksFilename, excelRisksFilename)
} else if responseType == tagsExcel {
doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, *executeModelMacro, *raaPlugin, *skipRiskRules, *ignoreOrphanedRiskTracking, false, false, false, false, true, false, false, false, false, dpi)
if err != nil {
handleErrorInServiceCall(err, context)
return
}
context.FileAttachment(tmpOutputDir+"/"+excelTagsFilename, excelTagsFilename)
} else if responseType == risksJSON {
doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, *executeModelMacro, *raaPlugin, *skipRiskRules, *ignoreOrphanedRiskTracking, false, false, false, false, false, true, false, false, false, dpi)
if err != nil {
handleErrorInServiceCall(err, context)
return
}
json, err := ioutil.ReadFile(tmpOutputDir + "/" + jsonRisksFilename)
if err != nil {
handleErrorInServiceCall(err, context)
return
}
context.Data(http.StatusOK, "application/json", json) // stream directly with JSON content-type in response instead of file download
} else if responseType == technicalAssetsJSON {
doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, *executeModelMacro, *raaPlugin, *skipRiskRules, *ignoreOrphanedRiskTracking, false, false, false, false, false, true, true, false, false, dpi)
if err != nil {
handleErrorInServiceCall(err, context)
return
}
json, err := ioutil.ReadFile(tmpOutputDir + "/" + jsonTechnicalAssetsFilename)
if err != nil {
handleErrorInServiceCall(err, context)
return
}
context.Data(http.StatusOK, "application/json", json) // stream directly with JSON content-type in response instead of file download
} else if responseType == statsJSON {
doItViaRuntimeCall(tmpModelFile.Name(), tmpOutputDir, *executeModelMacro, *raaPlugin, *skipRiskRules, *ignoreOrphanedRiskTracking, false, false, false, false, false, false, false, true, false, dpi)
if err != nil {
handleErrorInServiceCall(err, context)
return
}
json, err := ioutil.ReadFile(tmpOutputDir + "/" + jsonStatsFilename)
if err != nil {
handleErrorInServiceCall(err, context)
return
}
context.Data(http.StatusOK, "application/json", json) // stream directly with JSON content-type in response instead of file download
}
}
// fully replaces threagile.yaml in sub-folder given by UUID
func importModel(context *gin.Context) {
folderNameOfKey, key, ok := checkTokenToFolderName(context)
if !ok {
return
}
lockFolder(folderNameOfKey)
defer unlockFolder(folderNameOfKey)
uuid := context.Param("model-id") // UUID is syntactically validated in readModel+checkModelFolder (next line) via uuid.Parse(modelUUID)
_, _, ok = readModel(context, uuid, key, folderNameOfKey)
if ok {
// first analyze it simply by executing the full risk process (just discard the result) to ensure that everything would work
yamlContent, ok := execute(context, true)
if ok {
// if we're here, then no problem was raised, so ok to proceed
ok = writeModelYAML(context, string(yamlContent), key, folderNameForModel(folderNameOfKey, uuid), "Model Import", false)
if ok {
context.JSON(http.StatusCreated, gin.H{
"message": "model imported",
})
}
}
}
}
func stats(context *gin.Context) {
keyCount, modelCount := 0, 0
keyFolders, err := ioutil.ReadDir(baseFolder)
if err != nil {
log.Println(err)
context.JSON(http.StatusInternalServerError, gin.H{
"error": "unable to collect stats",
})
return
}
for _, keyFolder := range keyFolders {
if len(keyFolder.Name()) == 128 { // 128 hex chars look like a sha512 key hash, so count it as a key folder for the stats
keyCount++
modelFolders, err := ioutil.ReadDir(baseFolder + "/" + keyFolder.Name())
if err != nil {
log.Println(err)
context.JSON(http.StatusInternalServerError, gin.H{
"error": "unable to collect stats",
})
return
}
for _, modelFolder := range modelFolders {
if len(modelFolder.Name()) == 36 { // 36 chars look like a uuid, so count it as a model folder for the stats
modelCount++
}
}
}
}
// TODO collect and deliver more stats (old model count?) and health info
context.JSON(http.StatusOK, gin.H{
"key_count": keyCount,
"model_count": modelCount,
"success_count": successCount,
"error_count": errorCount,
})
}
func getDataAsset(context *gin.Context) {
folderNameOfKey, key, ok := checkTokenToFolderName(context)
if !ok {
return
}
lockFolder(folderNameOfKey)
defer unlockFolder(folderNameOfKey)
modelInput, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey)
if ok {
// yes, here keyed by title in YAML for better readability in the YAML file itself
for title, dataAsset := range modelInput.Data_assets {
if dataAsset.ID == context.Param("data-asset-id") {
context.JSON(http.StatusOK, gin.H{
title: dataAsset,
})
return
}
}
context.JSON(http.StatusNotFound, gin.H{
"error": "data asset not found",
})
}
}
func deleteDataAsset(context *gin.Context) {
folderNameOfKey, key, ok := checkTokenToFolderName(context)
if !ok {
return
}
lockFolder(folderNameOfKey)
defer unlockFolder(folderNameOfKey)
modelInput, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey)
if ok {
referencesDeleted := false
// yes, here keyed by title in YAML for better readability in the YAML file itself
for title, dataAsset := range modelInput.Data_assets {
if dataAsset.ID == context.Param("data-asset-id") {
// also remove all usages of this data asset !!
for techAssetTitle, techAsset := range modelInput.Technical_assets {
if techAsset.Data_assets_processed != nil {
for i, parsedChangeCandidateAsset := range techAsset.Data_assets_processed {
referencedAsset := fmt.Sprintf("%v", parsedChangeCandidateAsset)
if referencedAsset == dataAsset.ID { // apply the removal
referencesDeleted = true
// Remove the element at index i
// TODO needs more testing
copy(techAsset.Data_assets_processed[i:], techAsset.Data_assets_processed[i+1:]) // Shift a[i+1:] left one index.
techAsset.Data_assets_processed[len(techAsset.Data_assets_processed)-1] = "" // Erase last element (write zero value).
techAsset.Data_assets_processed = techAsset.Data_assets_processed[:len(techAsset.Data_assets_processed)-1] // Truncate slice.
}
}
}
if techAsset.Data_assets_stored != nil {
for i, parsedChangeCandidateAsset := range techAsset.Data_assets_stored {
referencedAsset := fmt.Sprintf("%v", parsedChangeCandidateAsset)
if referencedAsset == dataAsset.ID { // apply the removal
referencesDeleted = true
// Remove the element at index i
// TODO needs more testing
copy(techAsset.Data_assets_stored[i:], techAsset.Data_assets_stored[i+1:]) // Shift a[i+1:] left one index.
techAsset.Data_assets_stored[len(techAsset.Data_assets_stored)-1] = "" // Erase last element (write zero value).
techAsset.Data_assets_stored = techAsset.Data_assets_stored[:len(techAsset.Data_assets_stored)-1] // Truncate slice.
}
}
}
if techAsset.Communication_links != nil {
for title, commLink := range techAsset.Communication_links {
for i, dataAssetSent := range commLink.Data_assets_sent {
referencedAsset := fmt.Sprintf("%v", dataAssetSent)
if referencedAsset == dataAsset.ID { // apply the removal
referencesDeleted = true
// Remove the element at index i
// TODO needs more testing
copy(techAsset.Communication_links[title].Data_assets_sent[i:], techAsset.Communication_links[title].Data_assets_sent[i+1:]) // Shift a[i+1:] left one index.
techAsset.Communication_links[title].Data_assets_sent[len(techAsset.Communication_links[title].Data_assets_sent)-1] = "" // Erase last element (write zero value).
x := techAsset.Communication_links[title]
x.Data_assets_sent = techAsset.Communication_links[title].Data_assets_sent[:len(techAsset.Communication_links[title].Data_assets_sent)-1] // Truncate slice.
techAsset.Communication_links[title] = x
}
}
for i, dataAssetReceived := range commLink.Data_assets_received {
referencedAsset := fmt.Sprintf("%v", dataAssetReceived)
if referencedAsset == dataAsset.ID { // apply the removal
referencesDeleted = true
// Remove the element at index i
// TODO needs more testing
copy(techAsset.Communication_links[title].Data_assets_received[i:], techAsset.Communication_links[title].Data_assets_received[i+1:]) // Shift a[i+1:] left one index.
techAsset.Communication_links[title].Data_assets_received[len(techAsset.Communication_links[title].Data_assets_received)-1] = "" // Erase last element (write zero value).
x := techAsset.Communication_links[title]
x.Data_assets_received = techAsset.Communication_links[title].Data_assets_received[:len(techAsset.Communication_links[title].Data_assets_received)-1] // Truncate slice.
techAsset.Communication_links[title] = x
}
}
}
}
// write the modified copy back, since the map stores struct values and the slice truncations above would otherwise be lost
modelInput.Technical_assets[techAssetTitle] = techAsset
}
for indivRiskCatTitle, indivRiskCat := range modelInput.Individual_risk_categories {
if indivRiskCat.Risks_identified != nil {
for indivRiskInstanceTitle, indivRiskInstance := range indivRiskCat.Risks_identified {
if indivRiskInstance.Most_relevant_data_asset == dataAsset.ID { // apply the removal
referencesDeleted = true
x := modelInput.Individual_risk_categories[indivRiskCatTitle].Risks_identified[indivRiskInstanceTitle]
x.Most_relevant_data_asset = "" // TODO needs more testing
modelInput.Individual_risk_categories[indivRiskCatTitle].Risks_identified[indivRiskInstanceTitle] = x
}
}
}
}
// remove the data asset itself
delete(modelInput.Data_assets, title)
ok = writeModel(context, key, folderNameOfKey, &modelInput, "Data Asset Deletion")
if ok {
context.JSON(http.StatusOK, gin.H{
"message": "data asset deleted",
"id": dataAsset.ID,
"references_deleted": referencesDeleted, // in order to signal to clients, that other model parts might've been deleted as well
})
}
return
}
}
context.JSON(http.StatusNotFound, gin.H{
"error": "data asset not found",
})
}
}
func setSharedRuntime(context *gin.Context) {
folderNameOfKey, key, ok := checkTokenToFolderName(context)
if !ok {
return
}
lockFolder(folderNameOfKey)
defer unlockFolder(folderNameOfKey)
modelInput, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey)
if ok {
// yes, here keyed by title in YAML for better readability in the YAML file itself
for title, sharedRuntime := range modelInput.Shared_runtimes {
if sharedRuntime.ID == context.Param("shared-runtime-id") {
payload := payloadSharedRuntime{}
err := context.BindJSON(&payload)
if err != nil {
log.Println(err)
context.JSON(http.StatusBadRequest, gin.H{
"error": "unable to parse request payload",
})
return
}
sharedRuntimeInput, ok := populateSharedRuntime(context, payload)
if !ok {
return
}
// in order to also update the title, remove the shared runtime from the map and re-insert it (with new key)
delete(modelInput.Shared_runtimes, title)
modelInput.Shared_runtimes[payload.Title] = sharedRuntimeInput
idChanged := sharedRuntimeInput.ID != sharedRuntime.ID
if idChanged { // ID-CHANGE-PROPAGATION
for indivRiskCatTitle, indivRiskCat := range modelInput.Individual_risk_categories {
if indivRiskCat.Risks_identified != nil {
for indivRiskInstanceTitle, indivRiskInstance := range indivRiskCat.Risks_identified {
if indivRiskInstance.Most_relevant_shared_runtime == sharedRuntime.ID { // apply the ID change
x := modelInput.Individual_risk_categories[indivRiskCatTitle].Risks_identified[indivRiskInstanceTitle]
x.Most_relevant_shared_runtime = sharedRuntimeInput.ID // TODO needs more testing
modelInput.Individual_risk_categories[indivRiskCatTitle].Risks_identified[indivRiskInstanceTitle] = x
}
}
}
}
}
ok = writeModel(context, key, folderNameOfKey, &modelInput, "Shared Runtime Update")
if ok {
context.JSON(http.StatusOK, gin.H{
"message": "shared runtime updated",
"id": sharedRuntimeInput.ID,
"id_changed": idChanged, // in order to signal to clients, that other model parts might've received updates as well and should be reloaded
})
}
return
}
}
context.JSON(http.StatusNotFound, gin.H{
"error": "shared runtime not found",
})
}
}
func setDataAsset(context *gin.Context) {
folderNameOfKey, key, ok := checkTokenToFolderName(context)
if !ok {
return
}
lockFolder(folderNameOfKey)
defer unlockFolder(folderNameOfKey)
modelInput, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey)
if ok {
// yes, here keyed by title in YAML for better readability in the YAML file itself
for title, dataAsset := range modelInput.Data_assets {
if dataAsset.ID == context.Param("data-asset-id") {
payload := payloadDataAsset{}
err := context.BindJSON(&payload)
if err != nil {
log.Println(err)
context.JSON(http.StatusBadRequest, gin.H{
"error": "unable to parse request payload",
})
return
}
dataAssetInput, ok := populateDataAsset(context, payload)
if !ok {
return
}
// in order to also update the title, remove the asset from the map and re-insert it (with new key)
delete(modelInput.Data_assets, title)
modelInput.Data_assets[payload.Title] = dataAssetInput
idChanged := dataAssetInput.ID != dataAsset.ID
if idChanged { // ID-CHANGE-PROPAGATION
// also update all usages to point to the new (changed) ID !!
for techAssetTitle, techAsset := range modelInput.Technical_assets {
if techAsset.Data_assets_processed != nil {
for i, parsedChangeCandidateAsset := range techAsset.Data_assets_processed {
referencedAsset := fmt.Sprintf("%v", parsedChangeCandidateAsset)
if referencedAsset == dataAsset.ID { // apply the ID change
modelInput.Technical_assets[techAssetTitle].Data_assets_processed[i] = dataAssetInput.ID
}
}
}
if techAsset.Data_assets_stored != nil {
for i, parsedChangeCandidateAsset := range techAsset.Data_assets_stored {
referencedAsset := fmt.Sprintf("%v", parsedChangeCandidateAsset)
if referencedAsset == dataAsset.ID { // apply the ID change
modelInput.Technical_assets[techAssetTitle].Data_assets_stored[i] = dataAssetInput.ID
}
}
}
if techAsset.Communication_links != nil {
for title, commLink := range techAsset.Communication_links {
for i, dataAssetSent := range commLink.Data_assets_sent {
referencedAsset := fmt.Sprintf("%v", dataAssetSent)
if referencedAsset == dataAsset.ID { // apply the ID change
modelInput.Technical_assets[techAssetTitle].Communication_links[title].Data_assets_sent[i] = dataAssetInput.ID
}
}
for i, dataAssetReceived := range commLink.Data_assets_received {
referencedAsset := fmt.Sprintf("%v", dataAssetReceived)
if referencedAsset == dataAsset.ID { // apply the ID change
modelInput.Technical_assets[techAssetTitle].Communication_links[title].Data_assets_received[i] = dataAssetInput.ID
}
}
}
}
}
for indivRiskCatTitle, indivRiskCat := range modelInput.Individual_risk_categories {
if indivRiskCat.Risks_identified != nil {
for indivRiskInstanceTitle, indivRiskInstance := range indivRiskCat.Risks_identified {
if indivRiskInstance.Most_relevant_data_asset == dataAsset.ID { // apply the ID change
x := modelInput.Individual_risk_categories[indivRiskCatTitle].Risks_identified[indivRiskInstanceTitle]
x.Most_relevant_data_asset = dataAssetInput.ID // TODO needs more testing
modelInput.Individual_risk_categories[indivRiskCatTitle].Risks_identified[indivRiskInstanceTitle] = x
}
}
}
}
}
ok = writeModel(context, key, folderNameOfKey, &modelInput, "Data Asset Update")
if ok {
context.JSON(http.StatusOK, gin.H{
"message": "data asset updated",
"id": dataAssetInput.ID,
"id_changed": idChanged, // in order to signal to clients, that other model parts might've received updates as well and should be reloaded
})
}
return
}
}
context.JSON(http.StatusNotFound, gin.H{
"error": "data asset not found",
})
}
}
func getSharedRuntime(context *gin.Context) {
folderNameOfKey, key, ok := checkTokenToFolderName(context)
if !ok {
return
}
lockFolder(folderNameOfKey)
defer unlockFolder(folderNameOfKey)
modelInput, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey)
if ok {
// yes, here keyed by title in YAML for better readability in the YAML file itself
for title, sharedRuntime := range modelInput.Shared_runtimes {
if sharedRuntime.ID == context.Param("shared-runtime-id") {
context.JSON(http.StatusOK, gin.H{
title: sharedRuntime,
})
return
}
}
context.JSON(http.StatusNotFound, gin.H{
"error": "shared runtime not found",
})
}
}
func createNewSharedRuntime(context *gin.Context) {
folderNameOfKey, key, ok := checkTokenToFolderName(context)
if !ok {
return
}
lockFolder(folderNameOfKey)
defer unlockFolder(folderNameOfKey)
modelInput, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey)
if ok {
payload := payloadSharedRuntime{}
err := context.BindJSON(&payload)
if err != nil {
log.Println(err)
context.JSON(http.StatusBadRequest, gin.H{
"error": "unable to parse request payload",
})
return
}
// yes, here keyed by title in YAML for better readability in the YAML file itself
if _, exists := modelInput.Shared_runtimes[payload.Title]; exists {
context.JSON(http.StatusConflict, gin.H{
"error": "shared runtime with this title already exists",
})
return
}
// but later it will be keyed in memory by its "id", so check uniqueness of the id as well
for _, runtime := range modelInput.Shared_runtimes {
if runtime.ID == payload.Id {
context.JSON(http.StatusConflict, gin.H{
"error": "shared runtime with this id already exists",
})
return
}
}
if !checkTechnicalAssetsExisting(modelInput, payload.Technical_assets_running) {
context.JSON(http.StatusBadRequest, gin.H{
"error": "referenced technical asset does not exist",
})
return
}
sharedRuntimeInput, ok := populateSharedRuntime(context, payload)
if !ok {
return
}
if modelInput.Shared_runtimes == nil {
modelInput.Shared_runtimes = make(map[string]model.InputSharedRuntime)
}
modelInput.Shared_runtimes[payload.Title] = sharedRuntimeInput
ok = writeModel(context, key, folderNameOfKey, &modelInput, "Shared Runtime Creation")
if ok {
context.JSON(http.StatusOK, gin.H{
"message": "shared runtime created",
"id": sharedRuntimeInput.ID,
})
}
}
}
func checkTechnicalAssetsExisting(modelInput model.ModelInput, techAssetIDs []string) (ok bool) {
for _, techAssetID := range techAssetIDs {
exists := false
for _, val := range modelInput.Technical_assets {
if val.ID == techAssetID {
exists = true
break
}
}
if !exists {
return false
}
}
return true
}
func populateSharedRuntime(context *gin.Context, payload payloadSharedRuntime) (sharedRuntimeInput model.InputSharedRuntime, ok bool) {
sharedRuntimeInput = model.InputSharedRuntime{
ID: payload.Id,
Description: payload.Description,
Tags: support.LowerCaseAndTrim(payload.Tags),
Technical_assets_running: payload.Technical_assets_running,
}
return sharedRuntimeInput, true
}
func deleteSharedRuntime(context *gin.Context) {
folderNameOfKey, key, ok := checkTokenToFolderName(context)
if !ok {
return
}
lockFolder(folderNameOfKey)
defer unlockFolder(folderNameOfKey)
modelInput, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey)
if ok {
referencesDeleted := false
// yes, here keyed by title in YAML for better readability in the YAML file itself
for title, sharedRuntime := range modelInput.Shared_runtimes {
if sharedRuntime.ID == context.Param("shared-runtime-id") {
// also remove all usages of this shared runtime !!
for indivRiskCatTitle, indivRiskCat := range modelInput.Individual_risk_categories {
if indivRiskCat.Risks_identified != nil {
for indivRiskInstanceTitle, indivRiskInstance := range indivRiskCat.Risks_identified {
if indivRiskInstance.Most_relevant_shared_runtime == sharedRuntime.ID { // apply the removal
referencesDeleted = true
x := modelInput.Individual_risk_categories[indivRiskCatTitle].Risks_identified[indivRiskInstanceTitle]
x.Most_relevant_shared_runtime = "" // TODO needs more testing
modelInput.Individual_risk_categories[indivRiskCatTitle].Risks_identified[indivRiskInstanceTitle] = x
}
}
}
}
// remove the shared runtime itself
delete(modelInput.Shared_runtimes, title)
ok = writeModel(context, key, folderNameOfKey, &modelInput, "Shared Runtime Deletion")
if ok {
context.JSON(http.StatusOK, gin.H{
"message": "shared runtime deleted",
"id": sharedRuntime.ID,
"references_deleted": referencesDeleted, // in order to signal to clients, that other model parts might've been deleted as well
})
}
return
}
}
context.JSON(http.StatusNotFound, gin.H{
"error": "shared runtime not found",
})
}
}
func createNewDataAsset(context *gin.Context) {
folderNameOfKey, key, ok := checkTokenToFolderName(context)
if !ok {
return
}
lockFolder(folderNameOfKey)
defer unlockFolder(folderNameOfKey)
modelInput, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey)
if ok {
payload := payloadDataAsset{}
err := context.BindJSON(&payload)
if err != nil {
log.Println(err)
context.JSON(http.StatusBadRequest, gin.H{
"error": "unable to parse request payload",
})
return
}
// yes, here keyed by title in YAML for better readability in the YAML file itself
if _, exists := modelInput.Data_assets[payload.Title]; exists {
context.JSON(http.StatusConflict, gin.H{
"error": "data asset with this title already exists",
})
return
}
// but later it will be keyed in memory by its "id", so check uniqueness of the id as well
for _, asset := range modelInput.Data_assets {
if asset.ID == payload.Id {
context.JSON(http.StatusConflict, gin.H{
"error": "data asset with this id already exists",
})
return
}
}
dataAssetInput, ok := populateDataAsset(context, payload)
if !ok {
return
}
if modelInput.Data_assets == nil {
modelInput.Data_assets = make(map[string]model.InputDataAsset)
}
modelInput.Data_assets[payload.Title] = dataAssetInput
ok = writeModel(context, key, folderNameOfKey, &modelInput, "Data Asset Creation")
if ok {
context.JSON(http.StatusOK, gin.H{
"message": "data asset created",
"id": dataAssetInput.ID,
})
}
}
}
func populateDataAsset(context *gin.Context, payload payloadDataAsset) (dataAssetInput model.InputDataAsset, ok bool) {
usage, err := model.ParseUsage(payload.Usage)
if err != nil {
handleErrorInServiceCall(err, context)
return dataAssetInput, false
}
quantity, err := model.ParseQuantity(payload.Quantity)
if err != nil {
handleErrorInServiceCall(err, context)
return dataAssetInput, false
}
confidentiality, err := confidentiality.ParseConfidentiality(payload.Confidentiality)
if err != nil {
handleErrorInServiceCall(err, context)
return dataAssetInput, false
}
integrity, err := criticality.ParseCriticality(payload.Integrity)
if err != nil {
handleErrorInServiceCall(err, context)
return dataAssetInput, false
}
availability, err := criticality.ParseCriticality(payload.Availability)
if err != nil {
handleErrorInServiceCall(err, context)
return dataAssetInput, false
}
dataAssetInput = model.InputDataAsset{
ID: payload.Id,
Description: payload.Description,
Usage: usage.String(),
Tags: support.LowerCaseAndTrim(payload.Tags),
Origin: payload.Origin,
Owner: payload.Owner,
Quantity: quantity.String(),
Confidentiality: confidentiality.String(),
Integrity: integrity.String(),
Availability: availability.String(),
Justification_cia_rating: payload.Justification_cia_rating,
}
return dataAssetInput, true
}
func getDataAssets(context *gin.Context) {
folderNameOfKey, key, ok := checkTokenToFolderName(context)
if !ok {
return
}
lockFolder(folderNameOfKey)
defer unlockFolder(folderNameOfKey)
model, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey)
if ok {
context.JSON(http.StatusOK, model.Data_assets)
}
}
func getTrustBoundaries(context *gin.Context) {
folderNameOfKey, key, ok := checkTokenToFolderName(context)
if !ok {
return
}
lockFolder(folderNameOfKey)
defer unlockFolder(folderNameOfKey)
model, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey)
if ok {
context.JSON(http.StatusOK, model.Trust_boundaries)
}
}
func getSharedRuntimes(context *gin.Context) {
folderNameOfKey, key, ok := checkTokenToFolderName(context)
if !ok {
return
}
lockFolder(folderNameOfKey)
defer unlockFolder(folderNameOfKey)
model, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey)
if ok {
context.JSON(http.StatusOK, model.Shared_runtimes)
}
}
func arrayOfStringValues(values []core.TypeEnum) []string {
result := make([]string, 0)
for _, value := range values {
result = append(result, value.String())
}
return result
}
func getModel(context *gin.Context) {
folderNameOfKey, key, ok := checkTokenToFolderName(context)
if !ok {
return
}
lockFolder(folderNameOfKey)
defer unlockFolder(folderNameOfKey)
_, yamlText, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey)
if ok {
tmpResultFile, err := ioutil.TempFile(model.TempFolder, "threagile-*.yaml")
support.CheckErr(err)
err = ioutil.WriteFile(tmpResultFile.Name(), []byte(yamlText), 0400)
if err != nil {
log.Println(err)
context.JSON(http.StatusInternalServerError, gin.H{
"error": "unable to stream model file",
})
return
}
defer os.Remove(tmpResultFile.Name())
context.FileAttachment(tmpResultFile.Name(), "threagile.yaml")
}
}
type payloadModels struct {
ID string `json:"id"`
Title string `json:"title"`
Timestamp_created time.Time `json:"timestamp_created"`
Timestamp_modified time.Time `json:"timestamp_modified"`
}
type payloadCover struct {
Title string `json:"title"`
Date time.Time `json:"date"`
Author model.Author `json:"author"`
}
type payloadOverview struct {
Management_summary_comment string `json:"management_summary_comment"`
Business_criticality string `json:"business_criticality"`
Business_overview model.Overview `json:"business_overview"`
Technical_overview model.Overview `json:"technical_overview"`
}
type payloadAbuseCases map[string]string
type payloadSecurityRequirements map[string]string
type payloadDataAsset struct {
Title string `json:"title"`
Id string `json:"id"`
Description string `json:"description"`
Usage string `json:"usage"`
Tags []string `json:"tags"`
Origin string `json:"origin"`
Owner string `json:"owner"`
Quantity string `json:"quantity"`
Confidentiality string `json:"confidentiality"`
Integrity string `json:"integrity"`
Availability string `json:"availability"`
Justification_cia_rating string `json:"justification_cia_rating"`
}
type payloadSharedRuntime struct {
Title string `json:"title"`
Id string `json:"id"`
Description string `json:"description"`
Tags []string `json:"tags"`
Technical_assets_running []string `json:"technical_assets_running"`
}
func setSecurityRequirements(context *gin.Context) {
folderNameOfKey, key, ok := checkTokenToFolderName(context)
if !ok {
return
}
lockFolder(folderNameOfKey)
defer unlockFolder(folderNameOfKey)
modelInput, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey)
if ok {
payload := payloadSecurityRequirements{}
err := context.BindJSON(&payload)
if err != nil {
log.Println(err)
context.JSON(http.StatusBadRequest, gin.H{
"error": "unable to parse request payload",
})
return
}
modelInput.Security_requirements = payload
ok = writeModel(context, key, folderNameOfKey, &modelInput, "Security Requirements Update")
if ok {
context.JSON(http.StatusOK, gin.H{
"message": "model updated",
})
}
}
}
func getSecurityRequirements(context *gin.Context) {
folderNameOfKey, key, ok := checkTokenToFolderName(context)
if !ok {
return
}
lockFolder(folderNameOfKey)
defer unlockFolder(folderNameOfKey)
model, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey)
if ok {
context.JSON(http.StatusOK, model.Security_requirements)
}
}
func setAbuseCases(context *gin.Context) {
folderNameOfKey, key, ok := checkTokenToFolderName(context)
if !ok {
return
}
lockFolder(folderNameOfKey)
defer unlockFolder(folderNameOfKey)
modelInput, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey)
if ok {
payload := payloadAbuseCases{}
err := context.BindJSON(&payload)
if err != nil {
log.Println(err)
context.JSON(http.StatusBadRequest, gin.H{
"error": "unable to parse request payload",
})
return
}
modelInput.Abuse_cases = payload
ok = writeModel(context, key, folderNameOfKey, &modelInput, "Abuse Cases Update")
if ok {
context.JSON(http.StatusOK, gin.H{
"message": "model updated",
})
}
}
}
func getAbuseCases(context *gin.Context) {
folderNameOfKey, key, ok := checkTokenToFolderName(context)
if !ok {
return
}
lockFolder(folderNameOfKey)
defer unlockFolder(folderNameOfKey)
model, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey)
if ok {
context.JSON(http.StatusOK, model.Abuse_cases)
}
}
func setOverview(context *gin.Context) {
folderNameOfKey, key, ok := checkTokenToFolderName(context)
if !ok {
return
}
lockFolder(folderNameOfKey)
defer unlockFolder(folderNameOfKey)
modelInput, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey)
if ok {
payload := payloadOverview{}
err := context.BindJSON(&payload)
if err != nil {
log.Println(err)
context.JSON(http.StatusBadRequest, gin.H{
"error": "unable to parse request payload",
})
return
}
criticality, err := criticality.ParseCriticality(payload.Business_criticality)
if err != nil {
handleErrorInServiceCall(err, context)
return
}
modelInput.Management_summary_comment = payload.Management_summary_comment
modelInput.Business_criticality = criticality.String()
modelInput.Business_overview.Description = payload.Business_overview.Description
modelInput.Business_overview.Images = payload.Business_overview.Images
modelInput.Technical_overview.Description = payload.Technical_overview.Description
modelInput.Technical_overview.Images = payload.Technical_overview.Images
ok = writeModel(context, key, folderNameOfKey, &modelInput, "Overview Update")
if ok {
context.JSON(http.StatusOK, gin.H{
"message": "model updated",
})
}
}
}
func handleErrorInServiceCall(err error, context *gin.Context) {
log.Println(err)
context.JSON(http.StatusBadRequest, gin.H{
"error": strings.TrimSpace(err.Error()),
})
}
func getOverview(context *gin.Context) {
folderNameOfKey, key, ok := checkTokenToFolderName(context)
if !ok {
return
}
lockFolder(folderNameOfKey)
defer unlockFolder(folderNameOfKey)
model, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey)
if ok {
context.JSON(http.StatusOK, gin.H{
"management_summary_comment": model.Management_summary_comment,
"business_criticality": model.Business_criticality,
"business_overview": model.Business_overview,
"technical_overview": model.Technical_overview,
})
}
}
func setCover(context *gin.Context) {
folderNameOfKey, key, ok := checkTokenToFolderName(context)
if !ok {
return
}
lockFolder(folderNameOfKey)
defer unlockFolder(folderNameOfKey)
modelInput, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey)
if ok {
payload := payloadCover{}
err := context.BindJSON(&payload)
if err != nil {
context.JSON(http.StatusBadRequest, gin.H{
"error": "unable to parse request payload",
})
return
}
modelInput.Title = payload.Title
if !payload.Date.IsZero() {
modelInput.Date = payload.Date.Format("2006-01-02")
}
modelInput.Author.Name = payload.Author.Name
modelInput.Author.Homepage = payload.Author.Homepage
ok = writeModel(context, key, folderNameOfKey, &modelInput, "Cover Update")
if ok {
context.JSON(http.StatusOK, gin.H{
"message": "model updated",
})
}
}
}
func getCover(context *gin.Context) {
folderNameOfKey, key, ok := checkTokenToFolderName(context)
if !ok {
return
}
lockFolder(folderNameOfKey)
defer unlockFolder(folderNameOfKey)
model, _, ok := readModel(context, context.Param("model-id"), key, folderNameOfKey)
if ok {
context.JSON(http.StatusOK, gin.H{
"title": model.Title,
"date": model.Date,
"author": model.Author,
})
}
}
// creates a sub-folder (named by a new UUID) inside the token folder
func createNewModel(context *gin.Context) {
folderNameOfKey, key, ok := checkTokenToFolderName(context)
if !ok {
return
}
ok = checkObjectCreationThrottler(context, "MODEL")
if !ok {
return
}
lockFolder(folderNameOfKey)
defer unlockFolder(folderNameOfKey)
uuid := uuid.New().String()
err := os.Mkdir(folderNameForModel(folderNameOfKey, uuid), 0700)
if err != nil {
context.JSON(http.StatusInternalServerError, gin.H{
"error": "unable to create model",
})
return
}
yaml := `title: New Threat Model
threagile_version: ` + model.ThreagileVersion + `
author:
name: ""
homepage: ""
date:
business_overview:
description: ""
images: []
technical_overview:
description: ""
images: []
business_criticality: ""
management_summary_comment: ""
questions: {}
abuse_cases: {}
security_requirements: {}
tags_available: []
data_assets: {}
technical_assets: {}
trust_boundaries: {}
shared_runtimes: {}
individual_risk_categories: {}
risk_tracking: {}
diagram_tweak_nodesep: ""
diagram_tweak_ranksep: ""
diagram_tweak_edge_layout: ""
diagram_tweak_suppress_edge_labels: false
diagram_tweak_invisible_connections_between_assets: []
diagram_tweak_same_rank_assets: []`
ok = writeModelYAML(context, yaml, key, folderNameForModel(folderNameOfKey, uuid), "New Model Creation", true)
if ok {
context.JSON(http.StatusCreated, gin.H{
"message": "model created",
"id": uuid,
})
}
}
func listModels(context *gin.Context) { // TODO currently returns error when any model is no longer valid in syntax, so eventually have some fallback to not just bark on an invalid model...
folderNameOfKey, key, ok := checkTokenToFolderName(context)
if !ok {
return
}
lockFolder(folderNameOfKey)
defer unlockFolder(folderNameOfKey)
result := make([]payloadModels, 0)
modelFolders, err := ioutil.ReadDir(folderNameOfKey)
if err != nil {
log.Println(err)
context.JSON(http.StatusNotFound, gin.H{
"error": "token not found",
})
return
}
for _, fileInfo := range modelFolders {
if fileInfo.IsDir() {
modelStat, err := os.Stat(folderNameOfKey + "/" + fileInfo.Name() + "/threagile.yaml")
if err != nil {
log.Println(err)
context.JSON(http.StatusNotFound, gin.H{
"error": "unable to list model",
})
return
}
model, _, ok := readModel(context, fileInfo.Name(), key, folderNameOfKey)
if !ok {
return
}
result = append(result, payloadModels{
ID: fileInfo.Name(),
Title: model.Title,
Timestamp_created: fileInfo.ModTime(),
Timestamp_modified: modelStat.ModTime(),
})
}
}
context.JSON(http.StatusOK, result)
}
func deleteModel(context *gin.Context) {
folderNameOfKey, _, ok := checkTokenToFolderName(context)
if !ok {
return
}
lockFolder(folderNameOfKey)
defer unlockFolder(folderNameOfKey)
folder, ok := checkModelFolder(context, context.Param("model-id"), folderNameOfKey)
if ok {
err := os.RemoveAll(folder)
if err != nil {
context.JSON(http.StatusNotFound, gin.H{
"error": "model not found",
})
return
}
context.JSON(http.StatusOK, gin.H{
"message": "model deleted",
})
}
}
func checkModelFolder(context *gin.Context, modelUUID string, folderNameOfKey string) (modelFolder string, ok bool) {
uuidParsed, err := uuid.Parse(modelUUID) // to avoid path traversal, only accept syntactically valid UUIDs as folder names
if err != nil {
log.Println(err)
context.JSON(http.StatusNotFound, gin.H{
"error": "model not found",
})
return modelFolder, false
}
modelFolder = folderNameForModel(folderNameOfKey, uuidParsed.String())
if _, err := os.Stat(modelFolder); os.IsNotExist(err) {
log.Println(err)
context.JSON(http.StatusNotFound, gin.H{
"error": "model not found",
})
return modelFolder, false
}
return modelFolder, true
}
func readModel(context *gin.Context, modelUUID string, key []byte, folderNameOfKey string) (modelInputResult model.ModelInput, yamlText string, ok bool) {
modelFolder, ok := checkModelFolder(context, modelUUID, folderNameOfKey)
if !ok {
return modelInputResult, yamlText, false
}
cryptoKey := generateKeyFromAlreadyStrongRandomInput(key)
block, err := aes.NewCipher(cryptoKey)
if err != nil {
log.Println(err)
context.JSON(http.StatusInternalServerError, gin.H{
"error": "unable to open model",
})
return modelInputResult, yamlText, false
}
aesgcm, err := cipher.NewGCM(block)
if err != nil {
log.Println(err)
context.JSON(http.StatusInternalServerError, gin.H{
"error": "unable to open model",
})
return modelInputResult, yamlText, false
}
fileBytes, err := ioutil.ReadFile(modelFolder + "/threagile.yaml")
if err != nil {
log.Println(err)
context.JSON(http.StatusInternalServerError, gin.H{
"error": "unable to open model",
})
return modelInputResult, yamlText, false
}
nonce := fileBytes[0:12]
ciphertext := fileBytes[12:]
plaintext, err := aesgcm.Open(nil, nonce, ciphertext, nil)
if err != nil {
log.Println(err)
context.JSON(http.StatusInternalServerError, gin.H{
"error": "unable to open model",
})
return modelInputResult, yamlText, false
}
r, err := gzip.NewReader(bytes.NewReader(plaintext))
if err != nil {
log.Println(err)
context.JSON(http.StatusInternalServerError, gin.H{
"error": "unable to open model",
})
return modelInputResult, yamlText, false
}
buf := new(bytes.Buffer)
if _, err = buf.ReadFrom(r); err != nil {
log.Println(err)
context.JSON(http.StatusInternalServerError, gin.H{
"error": "unable to open model",
})
return modelInputResult, yamlText, false
}
modelInput := model.ModelInput{}
yamlBytes := buf.Bytes()
err = yaml.Unmarshal(yamlBytes, &modelInput)
if err != nil {
log.Println(err)
context.JSON(http.StatusInternalServerError, gin.H{
"error": "unable to open model",
})
return modelInputResult, yamlText, false
}
return modelInput, string(yamlBytes), true
}
func writeModel(context *gin.Context, key []byte, folderNameOfKey string, modelInput *model.ModelInput, changeReasonForHistory string) (ok bool) {
modelFolder, ok := checkModelFolder(context, context.Param("model-id"), folderNameOfKey)
if ok {
modelInput.Threagile_version = model.ThreagileVersion
yamlBytes, err := yaml.Marshal(modelInput)
if err != nil {
log.Println(err)
context.JSON(http.StatusInternalServerError, gin.H{
"error": "unable to write model",
})
return false
}
/*
yamlBytes = model.ReformatYAML(yamlBytes)
*/
return writeModelYAML(context, string(yamlBytes), key, modelFolder, changeReasonForHistory, false)
}
return false
}
func writeModelYAML(context *gin.Context, yaml string, key []byte, modelFolder string, changeReasonForHistory string, skipBackup bool) (ok bool) {
if *verbose {
fmt.Println("about to write " + strconv.Itoa(len(yaml)) + " bytes of yaml into model folder: " + modelFolder)
}
var b bytes.Buffer
w := gzip.NewWriter(&b)
w.Write([]byte(yaml))
w.Close()
plaintext := b.Bytes()
cryptoKey := generateKeyFromAlreadyStrongRandomInput(key)
block, err := aes.NewCipher(cryptoKey)
if err != nil {
log.Println(err)
context.JSON(http.StatusInternalServerError, gin.H{
"error": "unable to write model",
})
return false
}
// Never use more than 2^32 random nonces with a given key because of the risk of a repeat.
nonce := make([]byte, 12)
if _, err := io.ReadFull(rand.Reader, nonce); err != nil {
log.Println(err)
context.JSON(http.StatusInternalServerError, gin.H{
"error": "unable to write model",
})
return false
}
aesgcm, err := cipher.NewGCM(block)
if err != nil {
log.Println(err)
context.JSON(http.StatusInternalServerError, gin.H{
"error": "unable to write model",
})
return false
}
ciphertext := aesgcm.Seal(nil, nonce, plaintext, nil)
if !skipBackup {
err = backupModelToHistory(modelFolder, changeReasonForHistory)
if err != nil {
log.Println(err)
context.JSON(http.StatusInternalServerError, gin.H{
"error": "unable to write model",
})
return false
}
}
f, err := os.Create(modelFolder + "/threagile.yaml")
if err != nil {
log.Println(err)
context.JSON(http.StatusInternalServerError, gin.H{
"error": "unable to write model",
})
return false
}
f.Write(nonce)
f.Write(ciphertext)
f.Close()
return true
}
func backupModelToHistory(modelFolder string, changeReasonForHistory string) (err error) {
historyFolder := modelFolder + "/history"
if _, err := os.Stat(historyFolder); os.IsNotExist(err) {
err = os.Mkdir(historyFolder, 0700)
if err != nil {
return err
}
}
input, err := ioutil.ReadFile(modelFolder + "/threagile.yaml")
if err != nil {
return err
}
historyFile := historyFolder + "/" + time.Now().Format("2006-01-02 15:04:05") + " " + changeReasonForHistory + ".backup"
err = ioutil.WriteFile(historyFile, input, 0400)
if err != nil {
return err
}
// now delete any old files if over limit to keep
files, err := ioutil.ReadDir(historyFolder)
if err != nil {
return err
}
if len(files) > backupHistoryFilesToKeep {
requiredToDelete := len(files) - backupHistoryFilesToKeep
sort.Slice(files, func(i, j int) bool {
return files[i].Name() < files[j].Name()
})
for _, file := range files {
requiredToDelete--
err = os.Remove(historyFolder + "/" + file.Name())
if err != nil {
return err
}
if requiredToDelete <= 0 {
break
}
}
}
return
}
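// Example history file name as produced above (date and change reason are illustrative):
//
//   history/2006-01-02 15:04:05 Data Asset Update.backup
//
// The timestamp prefix sorts lexicographically in chronological order, so the deletion
// loop above always removes the oldest backups first.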
type argon2Params struct {
memory uint32
iterations uint32
parallelism uint8
saltLength uint32
keyLength uint32
}
func generateKeyFromAlreadyStrongRandomInput(alreadyRandomInput []byte) []byte {
// Establish the parameters to use for Argon2.
p := &argon2Params{
memory: 64 * 1024,
iterations: 3,
parallelism: 2,
saltLength: 16,
keyLength: keySize,
}
// As the input is already cryptographically secure random, the salt is simply the first n bytes
salt := alreadyRandomInput[0:p.saltLength]
hash := argon2.IDKey(alreadyRandomInput[p.saltLength:], salt, p.iterations, p.memory, p.parallelism, p.keyLength)
return hash
}
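// Sketch of the derivation with the parameters above: the 32 already-random input bytes
// are split into salt = input[0:16] and secret = input[16:32], and Argon2id stretches the
// secret into a 32-byte key suitable for AES-256 (see readModel and writeModelYAML).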
func folderNameForModel(folderNameOfKey string, uuid string) string {
return folderNameOfKey + "/" + uuid
}
var throttlerLock sync.Mutex
var createdObjectsThrottler = make(map[string][]int64)
func checkObjectCreationThrottler(context *gin.Context, typeName string) bool {
throttlerLock.Lock()
defer throttlerLock.Unlock()
// remove all elements older than 3 minutes (= 180000000000 ns)
now := time.Now().UnixNano()
cutoff := now - 180000000000
for keyCheck := range createdObjectsThrottler {
for i := 0; i < len(createdObjectsThrottler[keyCheck]); i++ {
if createdObjectsThrottler[keyCheck][i] < cutoff {
// Remove the element at index i from slice (safe while looping using i as iterator)
createdObjectsThrottler[keyCheck] = append(createdObjectsThrottler[keyCheck][:i], createdObjectsThrottler[keyCheck][i+1:]...)
i-- // Since we just deleted a[i], we must redo that index
}
}
length := len(createdObjectsThrottler[keyCheck])
if length == 0 {
delete(createdObjectsThrottler, keyCheck)
}
/*
if *verbose {
log.Println("Throttling count: "+strconv.Itoa(length))
}
*/
}
// check current request
keyHash := support.Hash(typeName) // getting the real client ip is not easy inside fully encapsulated containerized runtime
if _, ok := createdObjectsThrottler[keyHash]; !ok {
createdObjectsThrottler[keyHash] = make([]int64, 0)
}
// check the limit of 20 creations for this type per 3 minutes
withinLimit := len(createdObjectsThrottler[keyHash]) < 20
if withinLimit {
createdObjectsThrottler[keyHash] = append(createdObjectsThrottler[keyHash], now)
return true
}
context.JSON(http.StatusTooManyRequests, gin.H{
"error": "object creation throttling exceeded (denial-of-service protection): please wait some time and try again",
})
return false
}
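// Worked example of the throttling window above: at most 20 creations per typeName within
// a sliding 3-minute window; timestamps older than 3 minutes are pruned on every call, so
// a throttled caller only needs to wait until enough of its own entries age out.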
var locksByFolderName = make(map[string]*sync.Mutex)
func lockFolder(folderName string) {
globalLock.Lock()
defer globalLock.Unlock()
_, exists := locksByFolderName[folderName]
if !exists {
locksByFolderName[folderName] = &sync.Mutex{}
}
locksByFolderName[folderName].Lock()
}
func unlockFolder(folderName string) {
if _, exists := locksByFolderName[folderName]; exists {
locksByFolderName[folderName].Unlock()
delete(locksByFolderName, folderName)
}
}
type tokenHeader struct {
Token string `header:"token"`
}
type keyHeader struct {
Key string `header:"key"`
}
func folderNameFromKey(key []byte) string {
sha512Hash := hashSHA512(key)
return baseFolder + "/" + sha512Hash
}
func hashSHA512(key []byte) string {
hasher := sha512.New()
hasher.Write(key)
return hex.EncodeToString(hasher.Sum(nil))
}
func createKey(context *gin.Context) {
ok := checkObjectCreationThrottler(context, "KEY")
if !ok {
return
}
globalLock.Lock()
defer globalLock.Unlock()
keyBytesArr := make([]byte, keySize)
n, err := rand.Read(keyBytesArr[:])
if n != keySize || err != nil {
log.Println(err)
context.JSON(http.StatusInternalServerError, gin.H{
"error": "unable to create key",
})
return
}
err = os.Mkdir(folderNameFromKey(keyBytesArr), 0700)
if err != nil {
log.Println(err)
context.JSON(http.StatusInternalServerError, gin.H{
"error": "unable to create key",
})
return
}
context.JSON(http.StatusCreated, gin.H{
"key": base64.RawURLEncoding.EncodeToString(keyBytesArr[:]),
})
}
func checkTokenToFolderName(context *gin.Context) (folderNameOfKey string, key []byte, ok bool) {
header := tokenHeader{}
if err := context.ShouldBindHeader(&header); err != nil {
log.Println(err)
context.JSON(http.StatusNotFound, gin.H{
"error": "token not found",
})
return folderNameOfKey, key, false
}
token, err := base64.RawURLEncoding.DecodeString(strings.TrimSpace(header.Token))
if len(token) == 0 || err != nil {
if err != nil {
log.Println(err)
}
context.JSON(http.StatusNotFound, gin.H{
"error": "token not found",
})
return folderNameOfKey, key, false
}
globalLock.Lock()
defer globalLock.Unlock()
housekeepingTokenMaps() // to remove timed-out ones
tokenHash := hashSHA256(token)
if timeoutStruct, exists := mapTokenHashToTimeoutStruct[tokenHash]; exists {
// re-create the key from token
		key = xor(token, timeoutStruct.xorRand)
		folderNameOfKey = folderNameFromKey(key)
if _, err := os.Stat(folderNameOfKey); os.IsNotExist(err) {
log.Println(err)
context.JSON(http.StatusNotFound, gin.H{
"error": "token not found",
})
return folderNameOfKey, key, false
}
timeoutStruct.lastAcessedNanotime = time.Now().UnixNano()
return folderNameOfKey, key, true
} else {
context.JSON(http.StatusNotFound, gin.H{
"error": "token not found",
})
return folderNameOfKey, key, false
}
}
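// checkKeyToFolderName validates the key header and maps the key to its existing storage folder.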
func checkKeyToFolderName(context *gin.Context) (folderNameOfKey string, key []byte, ok bool) {
header := keyHeader{}
if err := context.ShouldBindHeader(&header); err != nil {
log.Println(err)
context.JSON(http.StatusNotFound, gin.H{
"error": "key not found",
})
return folderNameOfKey, key, false
}
key, err := base64.RawURLEncoding.DecodeString(strings.TrimSpace(header.Key))
if len(key) == 0 || err != nil {
if err != nil {
log.Println(err)
}
context.JSON(http.StatusNotFound, gin.H{
"error": "key not found",
})
return folderNameOfKey, key, false
}
folderNameOfKey = folderNameFromKey(key)
if _, err := os.Stat(folderNameOfKey); os.IsNotExist(err) {
log.Println(err)
context.JSON(http.StatusNotFound, gin.H{
"error": "key not found",
})
return folderNameOfKey, key, false
}
return folderNameOfKey, key, true
}
func deleteKey(context *gin.Context) {
folderName, _, ok := checkKeyToFolderName(context)
if !ok {
return
}
globalLock.Lock()
defer globalLock.Unlock()
err := os.RemoveAll(folderName)
if err != nil {
log.Println("error during key delete: " + err.Error())
context.JSON(http.StatusNotFound, gin.H{
"error": "key not found",
})
return
}
context.JSON(http.StatusOK, gin.H{
"message": "key deleted",
})
}
func parseCommandlineArgs() {
modelFilename = flag.String("model", "threagile.yaml", "input model yaml file")
outputDir = flag.String("output", ".", "output directory")
raaPlugin = flag.String("raa-plugin", "raa.so", "RAA calculation plugin (.so shared object) file name")
executeModelMacro = flag.String("execute-model-macro", "", "Execute model macro (by ID)")
createExampleModel = flag.Bool("create-example-model", false, "just create an example model named threagile-example-model.yaml in the output directory")
createStubModel = flag.Bool("create-stub-model", false, "just create a minimal stub model named threagile-stub-model.yaml in the output directory")
createEditingSupport = flag.Bool("create-editing-support", false, "just create some editing support stuff in the output directory")
serverPort = flag.Int("server", 0, "start a server (instead of commandline execution) on the given port")
templateFilename = flag.String("background", "background.pdf", "background pdf file")
generateDataFlowDiagram = flag.Bool("generate-data-flow-diagram", true, "generate data-flow diagram")
generateDataAssetDiagram = flag.Bool("generate-data-asset-diagram", true, "generate data asset diagram")
generateRisksJSON = flag.Bool("generate-risks-json", true, "generate risks json")
generateTechnicalAssetsJSON = flag.Bool("generate-technical-assets-json", true, "generate technical assets json")
generateStatsJSON = flag.Bool("generate-stats-json", true, "generate stats json")
generateRisksExcel = flag.Bool("generate-risks-excel", true, "generate risks excel")
generateTagsExcel = flag.Bool("generate-tags-excel", true, "generate tags excel")
generateReportPDF = flag.Bool("generate-report-pdf", true, "generate report pdf, including diagrams")
generateDefectdojoGeneric = flag.Bool("generate-defectdojo-json", true, "generate defectdojo generic json")
	diagramDPI = flag.Int("diagram-dpi", defaultGraphvizDPI, "DPI used to render: maximum is "+strconv.Itoa(maxGraphvizDPI))
skipRiskRules = flag.String("skip-risk-rules", "", "comma-separated list of risk rules (by their ID) to skip")
riskRulesPlugins = flag.String("custom-risk-rules-plugins", "", "comma-separated list of plugins (.so shared object) file names with custom risk rules to load")
verbose = flag.Bool("verbose", false, "verbose output")
ignoreOrphanedRiskTracking = flag.Bool("ignore-orphaned-risk-tracking", false, "ignore orphaned risk tracking (just log them) not matching a concrete risk")
version := flag.Bool("version", false, "print version")
listTypes := flag.Bool("list-types", false, "print type information (enum values to be used in models)")
listRiskRules := flag.Bool("list-risk-rules", false, "print risk rules")
listModelMacros := flag.Bool("list-model-macros", false, "print model macros")
print3rdParty := flag.Bool("print-3rd-party-licenses", false, "print 3rd-party license information")
license := flag.Bool("print-license", false, "print license information")
flag.Usage = func() {
printLogo()
		fmt.Fprintln(os.Stderr, "Usage: threagile [options]")
fmt.Println()
fmt.Println()
fmt.Println()
fmt.Println("Options:")
fmt.Println()
flag.PrintDefaults()
fmt.Println()
fmt.Println()
fmt.Println("Examples:")
fmt.Println()
fmt.Println("If you want to create an example model (via docker) as a starting point to learn about Threagile just run: ")
fmt.Println(" docker run --rm -it " +
"-v \"$(pwd)\":/app/work " +
"threagile/threagile " +
"-create-example-model " +
"-output /app/work")
fmt.Println()
fmt.Println("If you want to create a minimal stub model (via docker) as a starting point for your own model just run: ")
fmt.Println(" docker run --rm -it " +
"-v \"$(pwd)\":/app/work " +
"threagile/threagile " +
"-create-stub-model " +
"-output /app/work")
fmt.Println()
printExamples()
fmt.Println()
}
flag.Parse()
if *diagramDPI < 20 {
*diagramDPI = 20
} else if *diagramDPI > maxGraphvizDPI {
		*diagramDPI = maxGraphvizDPI
}
if *version {
printLogo()
os.Exit(0)
}
if *listTypes {
printLogo()
fmt.Println("The following types are available (can be extended for custom rules):")
fmt.Println()
printTypes("Authentication", model.AuthenticationValues())
fmt.Println()
printTypes("Authorization", model.AuthorizationValues())
fmt.Println()
printTypes("Confidentiality", confidentiality.ConfidentialityValues())
fmt.Println()
printTypes("Criticality (for integrity and availability)", criticality.CriticalityValues())
fmt.Println()
printTypes("Data Breach Probability", model.DataBreachProbabilityValues())
fmt.Println()
printTypes("Data Format", model.DataFormatValues())
fmt.Println()
printTypes("Encryption", model.EncryptionStyleValues())
fmt.Println()
printTypes("Protocol", model.ProtocolValues())
fmt.Println()
printTypes("Quantity", model.QuantityValues())
fmt.Println()
printTypes("Risk Exploitation Impact", model.RiskExploitationImpactValues())
fmt.Println()
printTypes("Risk Exploitation Likelihood", model.RiskExploitationLikelihoodValues())
fmt.Println()
printTypes("Risk Function", model.RiskFunctionValues())
fmt.Println()
printTypes("Risk Severity", model.RiskSeverityValues())
fmt.Println()
printTypes("Risk Status", model.RiskStatusValues())
fmt.Println()
printTypes("STRIDE", model.STRIDEValues())
fmt.Println()
printTypes("Technical Asset Machine", model.TechnicalAssetMachineValues())
fmt.Println()
printTypes("Technical Asset Size", model.TechnicalAssetSizeValues())
fmt.Println()
printTypes("Technical Asset Technology", model.TechnicalAssetTechnologyValues())
fmt.Println()
printTypes("Technical Asset Type", model.TechnicalAssetTypeValues())
fmt.Println()
printTypes("Trust Boundary Type", model.TrustBoundaryTypeValues())
fmt.Println()
printTypes("Usage", model.UsageValues())
fmt.Println()
os.Exit(0)
}
if *listModelMacros {
printLogo()
fmt.Println("The following model macros are available (can be extended via custom model macros):")
fmt.Println()
/* TODO finish plugin stuff
fmt.Println("Custom model macros:")
for id, customModelMacro := range customModelMacros {
fmt.Println(id, "-->", customModelMacro.GetMacroDetails().Title)
}
fmt.Println()
*/
fmt.Println("----------------------")
fmt.Println("Built-in model macros:")
fmt.Println("----------------------")
fmt.Println(add_build_pipeline.GetMacroDetails().ID, "-->", add_build_pipeline.GetMacroDetails().Title)
fmt.Println(add_vault.GetMacroDetails().ID, "-->", add_vault.GetMacroDetails().Title)
fmt.Println(pretty_print.GetMacroDetails().ID, "-->", pretty_print.GetMacroDetails().Title)
fmt.Println(remove_unused_tags.GetMacroDetails().ID, "-->", remove_unused_tags.GetMacroDetails().Title)
fmt.Println(seed_risk_tracking.GetMacroDetails().ID, "-->", seed_risk_tracking.GetMacroDetails().Title)
fmt.Println(seed_tags.GetMacroDetails().ID, "-->", seed_tags.GetMacroDetails().Title)
fmt.Println()
os.Exit(0)
}
if *listRiskRules {
printLogo()
fmt.Println("The following risk rules are available (can be extended via custom risk rules):")
fmt.Println()
loadRiskRulePlugins()
for _, riskRule := range builtinRiskRulesPlugins {
fmt.Println(riskRule.Category().Id, "-->", riskRule.Category().Title, "--> with tags:", riskRule.SupportedTags())
}
fmt.Println()
os.Exit(0)
}
if *print3rdParty {
printLogo()
fmt.Println("Kudos & Credits to the following open-source projects:")
fmt.Println(" - golang (Google Go License): https://golang.org/LICENSE")
fmt.Println(" - go-yaml (MIT License): https://github.com/go-yaml/yaml/blob/v3/LICENSE")
fmt.Println(" - graphviz (CPL License): https://graphviz.gitlab.io/license/")
fmt.Println(" - gofpdf (MIT License): https://github.com/jung-kurt/gofpdf/blob/master/LICENSE")
fmt.Println(" - go-chart (MIT License): https://github.com/wcharczuk/go-chart/blob/master/LICENSE")
fmt.Println(" - excelize (BSD License): https://github.com/qax-os/excelize/blob/master/LICENSE")
fmt.Println(" - graphics-go (BSD License): https://github.com/BurntSushi/graphics-go/blob/master/LICENSE")
fmt.Println(" - google-uuid (BSD License): https://github.com/google/uuid/blob/master/LICENSE")
fmt.Println(" - gin-gonic (MIT License): https://github.com/gin-gonic/gin/blob/master/LICENSE")
fmt.Println(" - swagger-ui (Apache License): https://swagger.io/license/")
fmt.Println()
os.Exit(0)
}
if *license {
printLogo()
content, err := ioutil.ReadFile("/app/LICENSE.txt")
support.CheckErr(err)
fmt.Print(string(content))
fmt.Println()
os.Exit(0)
}
if *createExampleModel {
createExampleModelFile()
printLogo()
fmt.Println("An example model was created named threagile-example-model.yaml in the output directory.")
fmt.Println()
printExamples()
fmt.Println()
os.Exit(0)
}
if *createStubModel {
createStubModelFile()
printLogo()
fmt.Println("A minimal stub model was created named threagile-stub-model.yaml in the output directory.")
fmt.Println()
printExamples()
fmt.Println()
os.Exit(0)
}
if *createEditingSupport {
createEditingSupportFiles()
printLogo()
fmt.Println("The following files were created in the output directory:")
fmt.Println(" - schema.json")
fmt.Println(" - live-templates.txt")
fmt.Println()
fmt.Println("For a perfect editing experience within your IDE of choice you can easily get " +
"model syntax validation and autocompletion (very handy for enum values) as well as live templates: " +
"Just import the schema.json into your IDE and assign it as \"schema\" to each Threagile YAML file. " +
"Also try to import individual parts from the live-templates.txt file into your IDE as live editing templates.")
fmt.Println()
os.Exit(0)
}
}
func printLogo() {
fmt.Println()
fmt.Println(" _____ _ _ _ \n |_ _| |__ _ __ ___ __ _ __ _(_) | ___ \n | | | '_ \\| '__/ _ \\/ _` |/ _` | | |/ _ \\\n | | | | | | | | __/ (_| | (_| | | | __/\n |_| |_| |_|_| \\___|\\__,_|\\__, |_|_|\\___|\n |___/ ")
fmt.Println("Threagile - Agile Threat Modeling")
fmt.Println()
fmt.Println()
printVersion()
}
func printVersion() {
fmt.Println("Documentation: https://threagile.io")
fmt.Println("Docker Images: https://github.com/Otyg/threagile/pkgs/container/threagile")
fmt.Println("Sourcecode: https://github.com/otyg/threagile")
fmt.Println("License: Open-Source (MIT License)")
fmt.Println("Version: " + model.ThreagileVersion + " (" + buildTimestamp + ")")
fmt.Println()
fmt.Println()
}
func createExampleModelFile() {
support.CopyFile("/app/threagile-example-model.yaml", *outputDir+"/threagile-example-model.yaml")
}
func createStubModelFile() {
stub, err := ioutil.ReadFile("/app/threagile-stub-model.yaml")
support.CheckErr(err)
err = ioutil.WriteFile(*outputDir+"/threagile-stub-model.yaml", addSupportedTags(stub), 0644)
support.CheckErr(err)
}
func createEditingSupportFiles() {
support.CopyFile("/app/schema.json", *outputDir+"/schema.json")
support.CopyFile("/app/live-templates.txt", *outputDir+"/live-templates.txt")
}
func printExamples() {
fmt.Println("If you want to execute Threagile on a model yaml file (via docker): ")
fmt.Println(" docker run --rm -it " +
"-v \"$(pwd)\":/app/work " +
"threagile/threagile " +
"-verbose " +
"-model /app/work/threagile.yaml " +
"-output /app/work")
fmt.Println()
fmt.Println("If you want to run Threagile as a server (REST API) on some port (here 8080): ")
fmt.Println(" docker run --rm -it " +
"--shm-size=256m " +
"-p 8080:8080 " +
"--name threagile-server " +
"--mount 'type=volume,src=threagile-storage,dst=/data,readonly=false' " +
"threagile/threagile -server 8080")
fmt.Println()
fmt.Println("If you want to find out about the different enum values usable in the model yaml file: ")
fmt.Println(" docker run --rm -it threagile/threagile -list-types")
fmt.Println()
fmt.Println("If you want to use some nice editing help (syntax validation, autocompletion, and live templates) in your favourite IDE: ")
fmt.Println(" docker run --rm -it -v \"$(pwd)\":/app/work threagile/threagile -create-editing-support -output /app/work")
fmt.Println()
fmt.Println("If you want to list all available model macros (which are macros capable of reading a model yaml file, asking you questions in a wizard-style and then update the model yaml file accordingly): ")
fmt.Println(" docker run --rm -it threagile/threagile -list-model-macros")
fmt.Println()
fmt.Println("If you want to execute a certain model macro on the model yaml file (here the macro add-build-pipeline): ")
fmt.Println(" docker run --rm -it -v \"$(pwd)\":/app/work threagile/threagile -model /app/work/threagile.yaml -output /app/work -execute-model-macro add-build-pipeline")
}
func printTypes(title string, value interface{}) {
	fmt.Printf(" %v: %v\n", title, value)
}
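// parseModel reads the model yaml, validates it against schema.json (JSON Schema draft 2020-12) and parses it into the global model structure.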
func parseModel(inputFilename string) {
if *verbose {
fmt.Println("Parsing model:", inputFilename)
}
modelYaml, err := ioutil.ReadFile(inputFilename)
support.CheckErr(err)
	var validatorYaml interface{}
	err = yaml.Unmarshal(modelYaml, &validatorYaml)
	support.CheckErr(err)
validatorYaml, err = support.ToStringKeys(validatorYaml)
support.CheckErr(err)
compiler := jsonschema.NewCompiler()
compiler.Draft = jsonschema.Draft2020
compiler.AssertContent = true
compiler.AssertFormat = true
schemaFile, err := ioutil.ReadFile("schema.json")
support.CheckErr(err)
if err := compiler.AddResource("schema.json", strings.NewReader(string(schemaFile))); err != nil {
panic(err)
}
schema, err := compiler.Compile("schema.json")
if err != nil {
panic(err)
}
if err := schema.Validate(validatorYaml); err != nil {
panic(err)
}
model.ParsedModelRoot = model.ParseModel(modelYaml, deferredRiskTrackingDueToWildcardMatching)
}
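// applyWildcardRiskTrackingEvaluation expands risk-tracking entries whose synthetic risk id contains wildcards (*) by matching them against all generated risk ids.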
func applyWildcardRiskTrackingEvaluation() {
if *verbose {
fmt.Println("Executing risk tracking evaluation")
}
for syntheticRiskIdPattern, riskTracking := range deferredRiskTrackingDueToWildcardMatching {
foundSome := false
var matchingRiskIdExpression = regexp.MustCompile(strings.ReplaceAll(regexp.QuoteMeta(syntheticRiskIdPattern), `\*`, `[^@]+`))
		for syntheticRiskId := range model.GeneratedRisksBySyntheticId {
if matchingRiskIdExpression.Match([]byte(syntheticRiskId)) && hasNotYetAnyDirectNonWildcardRiskTrackings(syntheticRiskId) {
foundSome = true
model.ParsedModelRoot.RiskTracking[syntheticRiskId] = model.RiskTracking{
SyntheticRiskId: strings.TrimSpace(syntheticRiskId),
Justification: riskTracking.Justification,
CheckedBy: riskTracking.CheckedBy,
Ticket: riskTracking.Ticket,
Status: riskTracking.Status,
Date: riskTracking.Date,
}
}
}
if !foundSome {
if *ignoreOrphanedRiskTracking {
fmt.Println("Wildcard risk tracking does not match any risk id: " + syntheticRiskIdPattern)
} else {
panic(errors.New("wildcard risk tracking does not match any risk id: " + syntheticRiskIdPattern))
}
}
}
}
func hasNotYetAnyDirectNonWildcardRiskTrackings(syntheticRiskId string) bool {
if _, ok := model.ParsedModelRoot.RiskTracking[syntheticRiskId]; ok {
return false
}
return true
}
func checkModelFolder(context *gin.Context, modelUUID string, folderNameOfKey string) (modelFolder string, ok bool) {
uuidParsed, err := uuid.Parse(modelUUID)
if err != nil {
context.JSON(http.StatusNotFound, gin.H{
"error": "model not found",
})
return modelFolder, false
}
modelFolder = folderNameForModel(folderNameOfKey, uuidParsed.String())
if _, err := os.Stat(modelFolder); os.IsNotExist(err) {
context.JSON(http.StatusNotFound, gin.H{
"error": "model not found",
})
return modelFolder, false
}
return modelFolder, true
}
0001_initial.py
# -*- coding: utf-8 -*-
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('contenttypes', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Choice',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('choice', models.CharField(max_length=256, verbose_name='valg')),
],
options={
'permissions': (('view_choice', 'View Choice'),),
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Feedback',
fields=[
('feedback_id', models.AutoField(serialize=False, primary_key=True)),
('description', models.CharField(max_length=100, verbose_name='beskrivelse')),
('display_field_of_study', models.BooleanField(default=True, help_text='Grafen over studiefelt vil bli vist til bedriften', verbose_name='Vis studie oversikt')),
('display_info', models.BooleanField(default=True, help_text='En boks med ekstra informasjon vil bli vist til bedriften', verbose_name='Vis extra informasjon')),
('author', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name': 'tilbakemeldingsskjema',
'verbose_name_plural': 'tilbakemeldingsskjemaer',
'permissions': (('view_feedback', 'View Feedback'),),
},
bases=(models.Model,),
),
migrations.CreateModel(
name='FeedbackRelation',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('object_id', models.PositiveIntegerField()),
('deadline', models.DateField(verbose_name='Tidsfrist')),
('gives_mark', models.BooleanField(default=True, help_text='Gir automatisk prikk til brukere som ikke har svart innen fristen', verbose_name='Gir Prikk')),
('active', models.BooleanField(default=True)),
('created_date', models.DateTimeField(auto_now_add=True)),
('first_mail_sent', models.BooleanField(default=False)),
('answered', models.ManyToManyField(related_name='feedbacks', null=True, to=settings.AUTH_USER_MODEL, blank=True)),
('content_type', models.ForeignKey(to='contenttypes.ContentType')),
('feedback', models.ForeignKey(verbose_name='Tilbakemeldingskjema', to='feedback.Feedback')),
],
options={
'verbose_name': 'tilbakemelding',
'verbose_name_plural': 'tilbakemeldinger',
'permissions': (('view_feedbackrelation', 'View FeedbackRelation'),),
},
bases=(models.Model,),
),
migrations.CreateModel(
name='FieldOfStudyAnswer',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('answer', models.SmallIntegerField(verbose_name='Studieretning', choices=[(0, 'Gjest'), (1, 'Bachelor i Informatikk (BIT)'), (10, 'Software (SW)'), (11, 'Informasjonsforvaltning (DIF)'), (12, 'Komplekse Datasystemer (KDS)'), (13, 'Spillteknologi (SPT)'), (14, 'Intelligente Systemer (IRS)'), (15, 'Helseinformatikk (MSMEDTEK)'), (30, 'Annen mastergrad'), (80, 'PhD'), (90, 'International'), (100, 'Annet Onlinemedlem')])),
('feedback_relation', models.ForeignKey(related_name='field_of_study_answers', to='feedback.FeedbackRelation')),
],
options={
'permissions': (('view_fieldofstudyanswer', 'View FieldOfStudyAnswer'),),
},
bases=(models.Model,),
),
migrations.CreateModel(
name='MultipleChoiceAnswer',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('answer', models.CharField(max_length=256, verbose_name='svar')),
('feedback_relation', models.ForeignKey(related_name='multiple_choice_answers', to='feedback.FeedbackRelation')),
],
options={
'permissions': (('view_multiplechoiceanswer', 'View MultipleChoiceAnswer'),),
},
bases=(models.Model,),
),
migrations.CreateModel(
name='MultipleChoiceQuestion',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('label', models.CharField(max_length=256, verbose_name='Sp\xf8rsm\xe5l')),
],
options={
'permissions': (('view_multiplechoicequestion', 'View MultipleChoiceQuestion'),),
},
bases=(models.Model,),
),
migrations.CreateModel(
name='MultipleChoiceRelation',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('order', models.SmallIntegerField(default=30, verbose_name='Rekkef\xf8lge')),
('display', models.BooleanField(default=True, verbose_name='Vis til bedrift')),
('feedback', models.ForeignKey(related_name='multiple_choice_questions', to='feedback.Feedback')),
('multiple_choice_relation', models.ForeignKey(to='feedback.MultipleChoiceQuestion')),
],
options={
'permissions': (('view_multiplechoicerelation', 'View MultipleChoiceRelation'),),
},
bases=(models.Model,),
),
migrations.CreateModel(
name='RatingAnswer',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('answer', models.SmallIntegerField(default=0, verbose_name='karakter', choices=[(1, b'1'), (2, b'2'), (3, b'3'), (4, b'4'), (5, b'5'), (6, b'6')])),
('feedback_relation', models.ForeignKey(related_name='rating_answers', to='feedback.FeedbackRelation')),
],
options={
'permissions': (('view_ratinganswer', 'View RatingAnswer'),),
},
bases=(models.Model,),
),
migrations.CreateModel(
name='RatingQuestion',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('order', models.SmallIntegerField(default=20, verbose_name='Rekkef\xf8lge')),
('label', models.CharField(max_length=256, verbose_name='Sp\xf8rsm\xe5l')),
('display', models.BooleanField(default=True, verbose_name='Vis til bedrift')),
('feedback', models.ForeignKey(related_name='rating_questions', to='feedback.Feedback')),
],
options={
'permissions': (('view_ratingquestion', 'View RatingQuestion'),),
},
bases=(models.Model,),
),
migrations.CreateModel(
name='RegisterToken',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('token', models.CharField(max_length=32, verbose_name='token')),
('created', models.DateTimeField(auto_now_add=True, verbose_name='opprettet dato')),
('fbr', models.ForeignKey(related_name='Feedback_relation', to='feedback.FeedbackRelation')),
],
options={
'permissions': (('view_feedbackregistertoken', 'View FeedbackRegisterToken'),),
},
bases=(models.Model,),
),
migrations.CreateModel(
name='TextAnswer',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('answer', models.TextField(verbose_name='svar')),
('feedback_relation', models.ForeignKey(related_name='text_answers', to='feedback.FeedbackRelation')),
],
options={
'permissions': (('view_textanswer', 'View TextAnswer'),),
},
bases=(models.Model,),
),
migrations.CreateModel(
name='TextQuestion',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('order', models.SmallIntegerField(default=10, verbose_name='Rekkef\xf8lge')),
('label', models.CharField(max_length=256, verbose_name='Sp\xf8rsm\xe5l')),
('display', models.BooleanField(default=True, verbose_name='Vis til bedrift')),
('feedback', models.ForeignKey(related_name='text_questions', to='feedback.Feedback')),
],
options={
'permissions': (('view_textquestion', 'View TextQuestion'),),
},
bases=(models.Model,),
),
migrations.AddField(
model_name='textanswer',
name='question',
field=models.ForeignKey(related_name='answer', to='feedback.TextQuestion'),
preserve_default=True,
),
migrations.AddField(
model_name='ratinganswer',
name='question',
field=models.ForeignKey(related_name='answer', to='feedback.RatingQuestion'),
preserve_default=True,
),
migrations.AddField(
model_name='multiplechoiceanswer',
name='question',
            field=models.ForeignKey(related_name='answer', to='feedback.MultipleChoiceRelation'),
            preserve_default=True,
        ),
        migrations.AlterUniqueTogether(
            name='feedbackrelation',
            unique_together=set([('feedback', 'content_type', 'object_id')]),
        ),
        migrations.AddField(
            model_name='choice',
            name='question',
            field=models.ForeignKey(related_name='choices', to='feedback.MultipleChoiceQuestion'),
            preserve_default=True,
        ),
    ]
_colorbar.py
import _plotly_utils.basevalidators
class ColorbarValidator(_plotly_utils.basevalidators.CompoundValidator):
def __init__(self, plotly_name="colorbar", parent_name="scatter.marker", **kwargs):
super(ColorbarValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
data_class_str=kwargs.pop("data_class_str", "ColorBar"),
data_docs=kwargs.pop(
"data_docs",
"""
bgcolor
Sets the color of padded area.
bordercolor
Sets the axis line color.
borderwidth
                Sets the width (in px) of the border enclosing
this color bar.
dtick
Sets the step in-between ticks on this axis.
Use with `tick0`. Must be a positive number, or
special strings available to "log" and "date"
axes. If the axis `type` is "log", then ticks
are set every 10^(n*dtick) where n is the tick
number. For example, to set a tick mark at 1,
10, 100, 1000, ... set dtick to 1. To set tick
marks at 1, 100, 10000, ... set dtick to 2. To
set tick marks at 1, 5, 25, 125, 625, 3125, ...
set dtick to log_10(5), or 0.69897000433. "log"
has several special values; "L<f>", where `f`
is a positive number, gives ticks linearly
spaced in value (but not position). For example
`tick0` = 0.1, `dtick` = "L0.5" will put ticks
at 0.1, 0.6, 1.1, 1.6 etc. To show powers of 10
plus small digits between, use "D1" (all
digits) or "D2" (only 2 and 5). `tick0` is
ignored for "D1" and "D2". If the axis `type`
is "date", then you must convert the time to
milliseconds. For example, to set the interval
between ticks to one day, set `dtick` to
86400000.0. "date" also has special values
"M<n>" gives ticks spaced by a number of
months. `n` must be a positive integer. To set
ticks on the 15th of every third month, set
`tick0` to "2000-01-15" and `dtick` to "M3". To
set ticks every 4 years, set `dtick` to "M48"
exponentformat
Determines a formatting rule for the tick
exponents. For example, consider the number
1,000,000,000. If "none", it appears as
1,000,000,000. If "e", 1e+9. If "E", 1E+9. If
"power", 1x10^9 (with 9 in a super script). If
"SI", 1G. If "B", 1B.
len
                Sets the length of the color bar. This measure
excludes the padding of both ends. That is, the
color bar length is this length minus the
padding on both ends.
lenmode
Determines whether this color bar's length
(i.e. the measure in the color variation
direction) is set in units of plot "fraction"
                or in "pixels". Use `len` to set the value.
minexponent
Hide SI prefix for 10^n if |n| is below this
number. This only has an effect when
`tickformat` is "SI" or "B".
nticks
Specifies the maximum number of ticks for the
particular axis. The actual number of ticks
will be chosen automatically to be less than or
equal to `nticks`. Has an effect only if
`tickmode` is set to "auto".
outlinecolor
Sets the axis line color.
outlinewidth
Sets the width (in px) of the axis line.
separatethousands
If "true", even 4-digit integers are separated
showexponent
If "all", all exponents are shown besides their
significands. If "first", only the exponent of
the first tick is shown. If "last", only the
exponent of the last tick is shown. If "none",
no exponents appear.
showticklabels
Determines whether or not the tick labels are
drawn.
showtickprefix
If "all", all tick labels are displayed with a
prefix. If "first", only the first tick is
displayed with a prefix. If "last", only the
last tick is displayed with a suffix. If
"none", tick prefixes are hidden.
showticksuffix
Same as `showtickprefix` but for tick suffixes.
thickness
                Sets the thickness of the color bar. This
measure excludes the size of the padding, ticks
and labels.
thicknessmode
Determines whether this color bar's thickness
(i.e. the measure in the constant color
direction) is set in units of plot "fraction"
or in "pixels". Use `thickness` to set the
value.
tick0
Sets the placement of the first tick on this
axis. Use with `dtick`. If the axis `type` is
"log", then you must take the log of your
starting tick (e.g. to set the starting tick to
100, set the `tick0` to 2) except when
`dtick`=*L<f>* (see `dtick` for more info). If
the axis `type` is "date", it should be a date
string, like date data. If the axis `type` is
"category", it should be a number, using the
scale where each category is assigned a serial
number from zero in the order it appears.
tickangle
Sets the angle of the tick labels with respect
to the horizontal. For example, a `tickangle`
of -90 draws the tick labels vertically.
tickcolor
Sets the tick color.
tickfont
Sets the color bar's tick label font
tickformat
Sets the tick label formatting rule using d3
formatting mini-languages which are very
similar to those in Python. For numbers, see:
https://github.com/d3/d3-3.x-api-
reference/blob/master/Formatting.md#d3_format
And for dates see:
https://github.com/d3/d3-time-
format#locale_format We add one item to d3's
date formatter: "%{n}f" for fractional seconds
with n digits. For example, *2016-10-13
09:15:23.456* with tickformat "%H~%M~%S.%2f"
would display "09~15~23.46"
tickformatstops
A tuple of :class:`plotly.graph_objects.scatter
.marker.colorbar.Tickformatstop` instances or
dicts with compatible properties
tickformatstopdefaults
When used in a template (as layout.template.dat
a.scatter.marker.colorbar.tickformatstopdefault
s), sets the default property values to use for
elements of
scatter.marker.colorbar.tickformatstops
ticklabelposition
Determines where tick labels are drawn.
ticklen
Sets the tick length (in px).
tickmode
Sets the tick mode for this axis. If "auto",
the number of ticks is set via `nticks`. If
"linear", the placement of the ticks is
determined by a starting position `tick0` and a
tick step `dtick` ("linear" is the default
value if `tick0` and `dtick` are provided). If
"array", the placement of the ticks is set via
`tickvals` and the tick text is `ticktext`.
("array" is the default value if `tickvals` is
provided).
tickprefix
Sets a tick label prefix.
ticks
Determines whether ticks are drawn or not. If
"", this axis' ticks are not drawn. If
"outside" ("inside"), this axis' are drawn
outside (inside) the axis lines.
ticksuffix
Sets a tick label suffix.
ticktext
Sets the text displayed at the ticks position
via `tickvals`. Only has an effect if
`tickmode` is set to "array". Used with
`tickvals`.
ticktextsrc
Sets the source reference on Chart Studio Cloud
for ticktext .
tickvals
Sets the values at which ticks on this axis
appear. Only has an effect if `tickmode` is set
to "array". Used with `ticktext`.
tickvalssrc
Sets the source reference on Chart Studio Cloud
for tickvals .
tickwidth
Sets the tick width (in px).
title
:class:`plotly.graph_objects.scatter.marker.col
orbar.Title` instance or dict with compatible
properties
titlefont
Deprecated: Please use
scatter.marker.colorbar.title.font instead.
Sets this color bar's title font. Note that the
title's font used to be set by the now
deprecated `titlefont` attribute.
titleside
Deprecated: Please use
scatter.marker.colorbar.title.side instead.
Determines the location of color bar's title
with respect to the color bar. Note that the
title's location used to be set by the now
deprecated `titleside` attribute.
x
Sets the x position of the color bar (in plot
fraction).
xanchor
                Sets this color bar's horizontal position
                anchor. This anchor binds the `x` position to
                the "left", "center" or "right" of the color
xpad
Sets the amount of padding (in px) along the x
direction.
y
Sets the y position of the color bar (in plot
fraction).
yanchor
                Sets this color bar's vertical position anchor.
This anchor binds the `y` position to the
"top", "middle" or "bottom" of the color bar.
ypad
Sets the amount of padding (in px) along the y
direction.
""",
),
**kwargs
        )
v1_event_chart.go
// Copyright 2018-2021 Polyaxon, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Code generated by go-swagger; DO NOT EDIT.
package service_model
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command
import (
"context"
"github.com/go-openapi/errors"
"github.com/go-openapi/strfmt"
"github.com/go-openapi/swag"
)
// V1EventChart Chart spec definition
//
// swagger:model v1EventChart
type V1EventChart struct {
// Figure json object
Figure interface{} `json:"figure,omitempty"`
// Chart kind
Kind *V1EventChartKind `json:"kind,omitempty"`
}
// Validate validates this v1 event chart
func (m *V1EventChart) Validate(formats strfmt.Registry) error {
var res []error
if err := m.validateKind(formats); err != nil {
res = append(res, err)
}
if len(res) > 0 {
return errors.CompositeValidationError(res...)
}
return nil
}
func (m *V1EventChart) validateKind(formats strfmt.Registry) error {
if swag.IsZero(m.Kind) { // not required
return nil
}
if m.Kind != nil {
if err := m.Kind.Validate(formats); err != nil {
if ve, ok := err.(*errors.Validation); ok {
return ve.ValidateName("kind")
} else if ce, ok := err.(*errors.CompositeError); ok {
return ce.ValidateName("kind")
}
return err
}
}
return nil
}
// ContextValidate validates this v1 event chart based on the context it is used in
func (m *V1EventChart) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
var res []error
if err := m.contextValidateKind(ctx, formats); err != nil {
res = append(res, err)
}
if len(res) > 0 {
return errors.CompositeValidationError(res...)
}
return nil
}
func (m *V1EventChart) contextValidateKind(ctx context.Context, formats strfmt.Registry) error {
if m.Kind != nil {
if err := m.Kind.ContextValidate(ctx, formats); err != nil {
if ve, ok := err.(*errors.Validation); ok {
return ve.ValidateName("kind")
} else if ce, ok := err.(*errors.CompositeError); ok {
return ce.ValidateName("kind")
}
return err
}
}
return nil
}
// MarshalBinary interface implementation
func (m *V1EventChart) MarshalBinary() ([]byte, error) {
if m == nil {
return nil, nil
}
return swag.WriteJSON(m)
}
// UnmarshalBinary interface implementation
func (m *V1EventChart) UnmarshalBinary(b []byte) error {
	var res V1EventChart
	if err := swag.ReadJSON(b, &res); err != nil {
		return err
	}
	*m = res
	return nil
}
rpcnamedargs.py
#!/usr/bin/env python3
# Copyright (c) 2016 The Bitcoin Core developers
# Copyright (c) 2017 The Ravencoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test using named arguments for RPCs."""
from test_framework.test_framework import BayemcoinTestFramework
from test_framework.util import (
    assert_equal,
    assert_raises_rpc_error,
)
class NamedArgumentTest(BayemcoinTestFramework):
    def set_test_params(self):
        self.num_nodes = 1
    def run_test(self):
        node = self.nodes[0]
        h = node.help(command='getinfo')
        assert h.startswith('getinfo\n')
        assert_raises_rpc_error(-8, 'Unknown named parameter', node.help, random='getinfo')
        h = node.getblockhash(height=0)
        node.getblock(blockhash=h)
        assert_equal(node.echo(), [])
        assert_equal(node.echo(arg0=0, arg9=9), [0] + [None]*8 + [9])
        assert_equal(node.echo(arg1=1), [None, 1])
        assert_equal(node.echo(arg9=None), [None]*10)
        assert_equal(node.echo(arg0=0, arg3=3, arg9=9), [0] + [None]*2 + [3] + [None]*5 + [9])
if __name__ == '__main__':
    NamedArgumentTest().main()