file_name | prefix | suffix | middle
---|---|---|---
day06.rs | extern crate regex;
use std::io::prelude::*;
use std::io::BufReader;
use std::fs::File;
use regex::Regex;
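// A 1000x1000 grid of lights; each cell stores that light's brightness level.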
type Lights = [[u8; 1000]; 1000];
fn turn<F>(lights: &mut Lights, xstart: u16, ystart: u16, xend: u16, yend: u16, update_function: F)
where F: Fn(u8) -> u8 |
fn turn_on(lights: &mut Lights, xstart: u16, ystart: u16, xend: u16, yend: u16) {
turn(lights, xstart, ystart, xend, yend, |x: u8| -> u8 { x + 1 });
}
fn turn_off(lights: &mut Lights, xstart: u16, ystart: u16, xend: u16, yend: u16) {
turn(lights, xstart, ystart, xend, yend, |x: u8| -> u8 {
if x == 0 { 0 }
else { x - 1 }
});
}
fn toggle(lights: &mut Lights, xstart: u16, ystart: u16, xend: u16, yend: u16) {
turn(lights, xstart, ystart, xend, yend, |x: u8| -> u8 { x + 2 });
}
// Get the number of lights that are on.
fn num_on_lights(lights: &Lights) -> u64 {
let mut i = 0;
for arr in lights.iter() {
for value in arr.iter() {
if (*value) >= 1 {
i += 1;
}
}
}
i
}
// Get the total brightness of all the lights.
fn total_brightness(lights: &Lights) -> u64 {
let mut i: u64 = 0;
for arr in lights.iter() {
for value in arr.iter() {
i += (*value) as u64;
}
}
i
}
fn main() {
let mut lights = [[0u8; 1000]; 1000];
let command_regex = Regex::new(r"(?P<action>(?:toggle|(?:turn (?:on|off)))) (?P<xstart>\d+),(?P<ystart>\d+) through (?P<xend>\d+),(?P<yend>\d+)").unwrap();
let file = File::open("input.txt").unwrap();
let reader = BufReader::new(&file);
for wrapped_line in reader.lines() {
let line = wrapped_line.unwrap();
let captures = command_regex.captures(&line).unwrap();
let action = captures.name("action").unwrap();
let xstart = captures.name("xstart").unwrap().parse::<u16>().unwrap();
let ystart = captures.name("ystart").unwrap().parse::<u16>().unwrap();
let xend = captures.name("xend").unwrap().parse::<u16>().unwrap();
let yend = captures.name("yend").unwrap().parse::<u16>().unwrap();
match action {
"turn on" => turn_on(&mut lights, xstart, ystart, xend, yend),
"turn off" => turn_off(&mut lights, xstart, ystart, xend, yend),
"toggle" => toggle(&mut lights, xstart, ystart, xend, yend),
_ => panic!("Cannot do action '{}'", action)
}
}
println!("Number of on lights: {}", num_on_lights(&lights));
println!("Total brightness of lights: {}", total_brightness(&lights));
}
| {
for x in xstart..(xend+1) {
let ref mut v = lights[x as usize];
for y in ystart..(yend+1) {
v[y as usize] = update_function(v[y as usize]);
}
}
} |
resolve-uri.spec.ts | import {
Api,
ApiResolver,
Client,
InvokeApiOptions,
InvokeApiResult,
Manifest,
Plugin,
PluginModules,
PluginPackage,
QueryApiOptions,
QueryApiResult,
SchemaDocument,
Uri,
UriRedirect,
resolveUri,
} from "../";
describe("resolveUri", () => {
const client = (
redirects: UriRedirect[],
apis: Record<string, PluginModules>
): Client => ({
redirects: () => redirects,
query: <
TData extends Record<string, unknown> = Record<string, unknown>,
TVariables extends Record<string, unknown> = Record<string, unknown>
>(_options: QueryApiOptions<TVariables>): Promise<QueryApiResult<TData>> => {
return Promise.resolve({
data: ({
foo: "foo",
} as Record<string, unknown>) as TData
});
},
invoke: <TData = unknown>(
options: InvokeApiOptions
): Promise<InvokeApiResult<TData>> => {
return Promise.resolve({
data: apis[options.uri.uri]?.[options.module]?.[options.method](
options.input as Record<string, unknown>,
{} as Client
) as TData,
});
},
});
const createPluginApi = (uri: Uri, plugin: PluginPackage): Api => {
return {
invoke: () =>
Promise.resolve({
uri,
plugin,
} as InvokeApiResult),
getSchema: (_client: Client): Promise<string> =>
Promise.resolve("")
};
};
const createApi = (uri: Uri, manifest: Manifest, apiResolver: Uri): Api => {
return {
invoke: () =>
Promise.resolve({
uri,
manifest,
apiResolver,
} as InvokeApiResult),
getSchema: (_client: Client): Promise<string> =>
Promise.resolve("")
};
};
const ensApi: PluginModules = {
query: {
tryResolveUri: (
input: { authority: string; path: string },
_client: Client
) => {
return {
uri: input.authority === "ens" ? "ipfs/QmHash" : undefined,
};
},
},
};
const ipfsApi: PluginModules = {
query: {
tryResolveUri: (
input: { authority: string; path: string },
_client: Client
) => {
return {
manifest: | },
};
const pluginApi: PluginModules = {
query: {
tryResolveUri: (
input: { authority: string; path: string },
_client: Client
) => {
return {
manifest:
input.authority === "my" ? "format: 0.0.1-prealpha.1" : undefined,
};
},
},
};
const redirects: UriRedirect[] = [
{
from: new Uri("w3/api-resolver"),
to: new Uri("ens/ens"),
},
{
from: new Uri("w3/api-resolver"),
to: new Uri("ens/ipfs"),
},
{
from: new Uri("ens/my-plugin"),
to: {
factory: () => ({} as Plugin),
manifest: {
schema: "",
implemented: [new Uri("w3/api-resolver")],
imported: [],
},
},
},
];
const apis: Record<string, PluginModules> = {
"w3://ens/ens": ensApi,
"w3://ens/ipfs": ipfsApi,
"w3://ens/my-plugin": pluginApi,
};
it("sanity", () => {
const api = new Uri("w3://ens/ens");
const file = new Uri("w3/some-file");
const path = "w3/some-path";
const query = ApiResolver.Query;
const uri = new Uri("w3/some-uri");
expect(query.tryResolveUri(client(redirects, apis), api, uri)).toBeDefined();
expect(query.getFile(client(redirects, apis), file, path)).toBeDefined();
});
it("works in the typical case", async () => {
const result = await resolveUri(
new Uri("ens/test.eth"),
client(redirects, apis),
createPluginApi,
createApi,
true
);
const apiIdentity = await result.invoke(
{} as InvokeApiOptions,
{} as Client
);
expect(apiIdentity).toMatchObject({
uri: new Uri("ipfs/QmHash"),
manifest: {
format: "0.0.1-prealpha.1"
},
apiResolver: new Uri("ens/ipfs"),
});
});
it("uses a plugin that implements api-resolver", async () => {
const result = await resolveUri(
new Uri("my/something-different"),
client(redirects, apis),
createPluginApi,
createApi,
true
);
const apiIdentity = await result.invoke(
{} as InvokeApiOptions,
{} as Client
);
expect(apiIdentity).toMatchObject({
uri: new Uri("my/something-different"),
manifest: {
format: "0.0.1-prealpha.1"
},
apiResolver: new Uri("ens/my-plugin"),
});
});
it("works when direct query a Web3API that implements the api-resolver", async () => {
const result = await resolveUri(
new Uri("ens/ens"),
client(redirects, apis),
createPluginApi,
createApi,
true
);
const apiIdentity = await result.invoke(
{} as InvokeApiOptions,
{} as Client
);
expect(apiIdentity).toMatchObject({
uri: new Uri("ipfs/QmHash"),
manifest: {
format: "0.0.1-prealpha.1",
dog: "cat"
},
apiResolver: new Uri("ens/ipfs"),
});
});
it("works when direct query a plugin Web3API that implements the api-resolver", async () => {
const result = await resolveUri(
new Uri("my/something-different"),
client(redirects, apis),
createPluginApi,
createApi,
true
);
const apiIdentity = await result.invoke(
{} as InvokeApiOptions,
{} as Client
);
expect(apiIdentity).toMatchObject({
uri: new Uri("my/something-different"),
manifest: {
format: "0.0.1-prealpha.1"
},
apiResolver: new Uri("ens/my-plugin"),
});
});
it("throws when circular redirect loops are found", async () => {
const circular: UriRedirect[] = [
...redirects,
{
from: new Uri("some/api"),
to: new Uri("ens/api"),
},
{
from: new Uri("ens/api"),
to: new Uri("some/api"),
},
];
expect.assertions(1);
return resolveUri(
new Uri("some/api"),
client(circular, apis),
createPluginApi,
createApi,
true
).catch((e) =>
expect(e.message).toMatch(/Infinite loop while resolving URI/)
);
});
it("throws when redirect missing the from property", async () => {
const missingFromProperty: UriRedirect[] = [
...redirects,
{
from: new Uri("some/api"),
to: new Uri("ens/api"),
},
{
from: null,
to: new Uri("another/api"),
},
];
expect.assertions(1);
return resolveUri(
new Uri("some/api"),
client(missingFromProperty, apis),
createPluginApi,
createApi,
true
).catch((e) =>
expect(e.message).toMatch("Redirect missing the from property.\nEncountered while resolving w3://some/api")
);
});
it("works when a Web3API redirects to a Plugin", async () => {
const uriToPlugin: UriRedirect[] = [
...redirects,
{
from: new Uri("some/api"),
to: {
factory: () => ({} as Plugin),
manifest: {
schema: "",
implemented: [new Uri("w3/api-resolver")],
imported: [],
},
},
},
];
const result = await resolveUri(
new Uri("some/api"),
client(uriToPlugin, apis),
createPluginApi,
createApi,
true
);
const apiIdentity = await result.invoke(
{} as InvokeApiOptions,
{} as Client
);
expect(apiIdentity.error).toBeUndefined();
});
it("throw when URI does not resolve to an API", async () => {
const faultyIpfsApi: PluginModules = {
query: {
tryResolveUri: (
input: { authority: string; path: string },
_client: Client
) => {
return {
manifest: null
};
},
},
};
const uri = new Uri("some/api");
expect.assertions(1);
await resolveUri(
uri,
client(redirects, {
...apis,
"w3://ens/ipfs": faultyIpfsApi
}),
createPluginApi,
createApi,
true
).catch((e) =>
expect(e.message).toMatch(`No Web3API found at URI: ${uri.uri}`)
);
});
});
// TODO:
// plugin that has a URI which is being redirected
// plugin whose from = uri-resolver, plus another redirect pointing uri-resolver at something else (could easily break...)
// nested web3api that's a URI resolver available through another URI authority ([ens => crypto], [crypto => new]) | input.authority === "ipfs" ? "format: 0.0.1-prealpha.1\ndog: cat" : undefined,
};
}, |
google.go | // Copyright © 2018 Banzai Cloud
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package verify
import (
"context"
"encoding/json"
"errors"
"fmt"
"net/http"
"strconv"
"strings"
pkgSecret "github.com/banzaicloud/pipeline/pkg/secret"
"github.com/goph/emperror"
"github.com/sirupsen/logrus"
"golang.org/x/oauth2/google"
"golang.org/x/oauth2/jwt"
"google.golang.org/api/cloudresourcemanager/v1"
"google.golang.org/api/serviceusage/v1"
)
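// Names of the GCP API services that must be enabled on the verified project.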
const (
ComputeEngineAPI = "compute.googleapis.com"
KubernetesEngineAPI = "container.googleapis.com"
GoogleCloudStorage = "storage-component.googleapis.com"
IAMServiceAccountCredentialsAPI = "iamcredentials.googleapis.com"
CloudResourceManagerAPI = "cloudresourcemanager.googleapis.com"
)
// GCPSecretVerifier represents a secret verifier for Google Cloud Platform secrets
type GCPSecretVerifier struct {
*ServiceAccount
}
// CreateGCPSecretVerifier creates a new Google Cloud Platform secret verifier
func CreateGCPSecretVerifier(values map[string]string) GCPSecretVerifier {
return GCPSecretVerifier{CreateServiceAccount(values)}
}
// VerifySecret validates GCP credentials
func (sv GCPSecretVerifier) VerifySecret() error {
return checkProject(sv.ServiceAccount)
}
func checkProject(serviceAccount *ServiceAccount) error {
missing, err := checkRequiredServices(serviceAccount)
if err != nil {
return err
}
if len(missing) != 0 {
return fmt.Errorf("required API services are disabled: %s", strings.Join(missing, ","))
}
return nil
}
func checkRequiredServices(serviceAccount *ServiceAccount) ([]string, error) {
requiredServices := map[string]string{
ComputeEngineAPI: "Compute Engine API",
KubernetesEngineAPI: "Kubernetes Engine API",
GoogleCloudStorage: "Google Cloud Storage",
IAMServiceAccountCredentialsAPI: "IAM ServiceAccount Credentials API",
CloudResourceManagerAPI: "Cloud Resource Manager API",
}
enabledServices, err := listEnabledServices(serviceAccount)
if err != nil {
logrus.Error(err)
return nil, errors.New("list enabled services failed")
}
var missingServices []string
for service, readableName := range requiredServices {
if !contains(enabledServices, service) {
missingServices = append(missingServices, readableName)
}
}
return missingServices, nil
}
func contains(values []string, value string) bool {
for _, v := range values {
if v == value {
return true
}
}
return false
}
func listEnabledServices(serviceAccount *ServiceAccount) ([]string, error) {
client, err := CreateOAuth2Client(serviceAccount, serviceusage.CloudPlatformScope)
if err != nil {
return nil, err
}
crmSvc, err := cloudresourcemanager.New(client)
if err != nil {
return nil, err
}
project, err := crmSvc.Projects.Get(serviceAccount.ProjectId).Do()
if err != nil {
return nil, err
}
suSvc, err := serviceusage.New(client)
if err != nil {
return nil, emperror.Wrap(err, "cannot create serviceusage client for checking enabled services")
}
enabledServicesCall := suSvc.Services.List("projects/" + strconv.FormatInt(project.ProjectNumber, 10)).Filter("state:ENABLED").Fields("services/config/name")
var enabledServices []string
nextPageToken := ""
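// Page through the enabled-services list until the response carries no next-page token.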
for {
resp, err := enabledServicesCall.PageToken(nextPageToken).Do()
if err != nil {
return nil, emperror.Wrap(err, "enabled services call failed")
}
for _, service := range resp.Services {
enabledServices = append(enabledServices, service.Config.Name)
}
if resp.NextPageToken == "" {
return enabledServices, nil
}
nextPageToken = resp.NextPageToken
}
}
// ServiceAccount describes a GKE service account
type ServiceAccount struct {
Type string `json:"type"`
ProjectId string `json:"project_id"`
PrivateKeyId string `json:"private_key_id"`
PrivateKey string `json:"private_key"`
ClientEmail string `json:"client_email"`
ClientId string `json:"client_id"`
AuthUri string `json:"auth_uri"`
TokenUri string `json:"token_uri"`
AuthProviderX509CertUrl string `json:"auth_provider_x509_cert_url"`
ClientX509CertUrl string `json:"client_x509_cert_url"`
}
// CreateServiceAccount creates a new 'ServiceAccount' instance
func CreateServiceAccount(values map[string]string) *ServiceAccount {
return &ServiceAccount{
Type: values[pkgSecret.Type],
ProjectId: values[pkgSecret.ProjectId],
PrivateKeyId: values[pkgSecret.PrivateKeyId],
PrivateKey: values[pkgSecret.PrivateKey],
ClientEmail: values[pkgSecret.ClientEmail],
ClientId: values[pkgSecret.ClientId],
AuthUri: values[pkgSecret.AuthUri],
TokenUri: values[pkgSecret.TokenUri],
AuthProviderX509CertUrl: values[pkgSecret.AuthX509Url],
ClientX509CertUrl: values[pkgSecret.ClientX509Url],
}
}
// createJWTConfig parses credentials from JSON
func createJWTConfig(credentials *ServiceAccount, scope ...string) (*jwt.Config, error) {
jsonConfig, err := json.Marshal(credentials)
if err != nil {
return nil, err
} | func CreateOAuth2Client(serviceAccount *ServiceAccount, scope ...string) (*http.Client, error) {
if len(scope) == 0 {
// This is here for backward compatibility, but it should probably be explicitly stated everywhere
scope = []string{serviceusage.CloudPlatformScope}
}
config, err := createJWTConfig(serviceAccount, scope...)
if err != nil {
return nil, err
}
return config.Client(context.Background()), nil
} | return google.JWTConfigFromJSON(jsonConfig, scope...)
}
// CreateOAuth2Client creates a new OAuth2 client with credentials |
utils.py | # NOTE: A place for helper utilities and decorators.
from wtoolzexceptions import exceptions
import flask
import marshmallow
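# Parse and validate request data from the given location using a marshmallow schema.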
def parse(schema, location):
if location == "args":
p = flask.request.args
elif location == "json":
p = flask.request.json
elif location == "view_args":
p = flask.request.view_args
else:
|
try:
return schema.load(p)
except marshmallow.ValidationError:
exceptions.ohoh(400)
| raise ValueError("location not args, json, or view_args.") |
ast_map.rs | // Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use abi::AbiSet;
use ast::*;
use ast_util;
use codemap::Span;
use fold::Folder;
use fold;
use parse::token;
use print::pprust;
use util::small_vector::SmallVector;
use std::cell::RefCell;
use std::iter;
use std::vec;
use std::fmt;
use std::vec_ng::Vec;
#[deriving(Clone, Eq)]
pub enum PathElem {
PathMod(Name),
PathName(Name)
}
impl PathElem {
pub fn name(&self) -> Name {
match *self {
PathMod(name) | PathName(name) => name
}
}
}
impl fmt::Show for PathElem {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let slot = token::get_name(self.name());
write!(f.buf, "{}", slot.get())
}
}
#[deriving(Clone)]
struct LinkedPathNode<'a> {
node: PathElem,
next: LinkedPath<'a>,
}
type LinkedPath<'a> = Option<&'a LinkedPathNode<'a>>;
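// Iterating a LinkedPath yields each PathElem from the head of the chain outward.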
impl<'a> Iterator<PathElem> for LinkedPath<'a> {
fn next(&mut self) -> Option<PathElem> {
match *self {
Some(node) => {
*self = node.next;
Some(node.node)
}
None => None
}
}
}
// HACK(eddyb) move this into libstd (value wrapper for vec::Items).
#[deriving(Clone)]
pub struct Values<'a, T>(vec::Items<'a, T>);
impl<'a, T: Pod> Iterator<T> for Values<'a, T> {
fn next(&mut self) -> Option<T> {
let &Values(ref mut items) = self;
items.next().map(|&x| x)
}
}
/// The type of the iterator used by with_path.
pub type PathElems<'a, 'b> = iter::Chain<Values<'a, PathElem>, LinkedPath<'b>>;
pub fn path_to_str<PI: Iterator<PathElem>>(mut path: PI) -> ~str {
let itr = token::get_ident_interner();
path.fold(~"", |mut s, e| {
let e = itr.get(e.name());
if !s.is_empty() {
s.push_str("::");
}
s.push_str(e.as_slice());
s
})
}
#[deriving(Clone)]
pub enum Node {
NodeItem(@Item),
NodeForeignItem(@ForeignItem),
NodeTraitMethod(@TraitMethod),
NodeMethod(@Method),
NodeVariant(P<Variant>),
NodeExpr(@Expr),
NodeStmt(@Stmt),
NodeArg(@Pat),
NodeLocal(@Pat),
NodeBlock(P<Block>),
/// NodeStructCtor represents a tuple struct.
NodeStructCtor(@StructDef),
}
// The odd layout is to bring down the total size.
#[deriving(Clone)]
enum MapEntry {
// Placeholder for holes in the map.
NotPresent,
// All the node types, with a parent ID.
EntryItem(NodeId, @Item),
EntryForeignItem(NodeId, @ForeignItem),
EntryTraitMethod(NodeId, @TraitMethod),
EntryMethod(NodeId, @Method),
EntryVariant(NodeId, P<Variant>),
EntryExpr(NodeId, @Expr),
EntryStmt(NodeId, @Stmt),
EntryArg(NodeId, @Pat),
EntryLocal(NodeId, @Pat),
EntryBlock(NodeId, P<Block>),
EntryStructCtor(NodeId, @StructDef),
// Roots for node trees.
RootCrate,
RootInlinedParent(P<InlinedParent>)
}
struct InlinedParent {
path: Vec<PathElem> ,
// Required by NodeTraitMethod and NodeMethod.
def_id: DefId
}
impl MapEntry {
fn parent(&self) -> Option<NodeId> {
Some(match *self {
EntryItem(id, _) => id,
EntryForeignItem(id, _) => id,
EntryTraitMethod(id, _) => id,
EntryMethod(id, _) => id,
EntryVariant(id, _) => id,
EntryExpr(id, _) => id,
EntryStmt(id, _) => id,
EntryArg(id, _) => id,
EntryLocal(id, _) => id,
EntryBlock(id, _) => id,
EntryStructCtor(id, _) => id,
_ => return None
})
}
fn to_node(&self) -> Option<Node> {
Some(match *self {
EntryItem(_, p) => NodeItem(p),
EntryForeignItem(_, p) => NodeForeignItem(p),
EntryTraitMethod(_, p) => NodeTraitMethod(p),
EntryMethod(_, p) => NodeMethod(p),
EntryVariant(_, p) => NodeVariant(p),
EntryExpr(_, p) => NodeExpr(p),
EntryStmt(_, p) => NodeStmt(p),
EntryArg(_, p) => NodeArg(p),
EntryLocal(_, p) => NodeLocal(p),
EntryBlock(_, p) => NodeBlock(p),
EntryStructCtor(_, p) => NodeStructCtor(p),
_ => return None
})
}
}
pub struct Map {
/// NodeIds are sequential integers from 0, so we can be
/// super-compact by storing them in a vector. Not everything with
/// a NodeId is in the map, but empirically the occupancy is about
/// 75-80%, so there's not too much overhead (certainly less than
/// a hashmap, since they (at the time of writing) have a maximum
/// of 75% occupancy).
///
/// Also, indexing is pretty quick when you've got a vector and
/// plain old integers.
priv map: RefCell<Vec<MapEntry> >
}
impl Map {
fn find_entry(&self, id: NodeId) -> Option<MapEntry> {
let map = self.map.borrow();
if map.get().len() > id as uint {
Some(*map.get().get(id as uint))
} else {
None
}
}
/// Retrieve the Node corresponding to `id`, failing if it cannot
/// be found.
pub fn get(&self, id: NodeId) -> Node {
match self.find(id) {
Some(node) => node,
None => fail!("couldn't find node id {} in the AST map", id)
}
}
/// Retrieve the Node corresponding to `id`, returning None if
/// cannot be found.
pub fn find(&self, id: NodeId) -> Option<Node> {
self.find_entry(id).and_then(|x| x.to_node())
}
pub fn get_parent(&self, id: NodeId) -> NodeId {
self.find_entry(id).and_then(|x| x.parent()).unwrap_or(id)
}
pub fn get_parent_did(&self, id: NodeId) -> DefId {
let parent = self.get_parent(id);
match self.find_entry(parent) {
Some(RootInlinedParent(data)) => data.def_id,
_ => ast_util::local_def(parent)
}
}
pub fn get_foreign_abis(&self, id: NodeId) -> AbiSet {
let parent = self.get_parent(id);
let abis = match self.find_entry(parent) {
Some(EntryItem(_, i)) => match i.node {
ItemForeignMod(ref nm) => Some(nm.abis),
_ => None
},
// Wrong but OK, because the only inlined foreign items are intrinsics.
Some(RootInlinedParent(_)) => Some(AbiSet::Intrinsic()),
_ => None
};
match abis {
Some(abis) => abis,
None => fail!("expected foreign mod or inlined parent, found {}",
self.node_to_str(parent))
}
}
pub fn get_foreign_vis(&self, id: NodeId) -> Visibility {
let vis = self.expect_foreign_item(id).vis;
match self.find(self.get_parent(id)) {
Some(NodeItem(i)) => vis.inherit_from(i.vis),
_ => vis
}
}
pub fn expect_item(&self, id: NodeId) -> @Item {
match self.find(id) {
Some(NodeItem(item)) => item,
_ => fail!("expected item, found {}", self.node_to_str(id))
}
}
pub fn expect_foreign_item(&self, id: NodeId) -> @ForeignItem {
match self.find(id) {
Some(NodeForeignItem(item)) => item,
_ => fail!("expected foreign item, found {}", self.node_to_str(id))
}
}
pub fn get_path_elem(&self, id: NodeId) -> PathElem {
match self.get(id) {
NodeItem(item) => {
match item.node {
ItemMod(_) | ItemForeignMod(_) => {
PathMod(item.ident.name)
}
_ => PathName(item.ident.name)
}
}
NodeForeignItem(i) => PathName(i.ident.name),
NodeMethod(m) => PathName(m.ident.name),
NodeTraitMethod(tm) => match *tm {
Required(ref m) => PathName(m.ident.name),
Provided(ref m) => PathName(m.ident.name)
},
NodeVariant(v) => PathName(v.node.name.name),
node => fail!("no path elem for {:?}", node)
}
}
pub fn with_path<T>(&self, id: NodeId, f: |PathElems| -> T) -> T {
self.with_path_next(id, None, f)
}
pub fn path_to_str(&self, id: NodeId) -> ~str {
self.with_path(id, |path| path_to_str(path))
}
fn path_to_str_with_ident(&self, id: NodeId, i: Ident) -> ~str {
self.with_path(id, |path| {
path_to_str(path.chain(Some(PathName(i.name)).move_iter()))
})
}
fn with_path_next<T>(&self, id: NodeId, next: LinkedPath, f: |PathElems| -> T) -> T {
let parent = self.get_parent(id);
let parent = match self.find_entry(id) {
Some(EntryForeignItem(..)) | Some(EntryVariant(..)) => {
// Anonymous extern items, enum variants and struct ctors
// go in the parent scope.
self.get_parent(parent)
}
// But tuple struct ctors don't have names, so use the path of their
// parent, the struct item. Similarly with closure expressions.
Some(EntryStructCtor(..)) | Some(EntryExpr(..)) => {
return self.with_path_next(parent, next, f);
}
_ => parent
};
if parent == id {
match self.find_entry(id) {
Some(RootInlinedParent(data)) => {
f(Values(data.path.iter()).chain(next))
}
_ => f(Values([].iter()).chain(next))
}
} else {
self.with_path_next(parent, Some(&LinkedPathNode {
node: self.get_path_elem(id),
next: next
}), f)
}
}
pub fn with_attrs<T>(&self, id: NodeId, f: |Option<&[Attribute]>| -> T) -> T {
let attrs = match self.get(id) {
NodeItem(i) => Some(i.attrs.as_slice()),
NodeForeignItem(fi) => Some(fi.attrs.as_slice()),
NodeTraitMethod(tm) => match *tm {
Required(ref type_m) => Some(type_m.attrs.as_slice()),
Provided(m) => Some(m.attrs.as_slice())
},
NodeMethod(m) => Some(m.attrs.as_slice()),
NodeVariant(ref v) => Some(v.node.attrs.as_slice()),
// unit/tuple structs take the attributes straight from
// the struct definition.
// FIXME(eddyb) make this work again (requires access to the map).
NodeStructCtor(_) => {
return self.with_attrs(self.get_parent(id), f);
}
_ => None
};
f(attrs)
}
pub fn span(&self, id: NodeId) -> Span {
match self.find(id) {
Some(NodeItem(item)) => item.span,
Some(NodeForeignItem(foreign_item)) => foreign_item.span,
Some(NodeTraitMethod(trait_method)) => {
match *trait_method {
Required(ref type_method) => type_method.span,
Provided(ref method) => method.span,
}
}
Some(NodeMethod(method)) => method.span,
Some(NodeVariant(variant)) => variant.span,
Some(NodeExpr(expr)) => expr.span,
Some(NodeStmt(stmt)) => stmt.span,
Some(NodeArg(pat)) | Some(NodeLocal(pat)) => pat.span,
Some(NodeBlock(block)) => block.span,
Some(NodeStructCtor(_)) => self.expect_item(self.get_parent(id)).span,
_ => fail!("node_span: could not find span for id {}", id),
}
}
pub fn node_to_str(&self, id: NodeId) -> ~str {
node_id_to_str(self, id)
}
}
pub trait FoldOps {
fn new_id(&self, id: NodeId) -> NodeId {
id
}
fn new_span(&self, span: Span) -> Span {
span
}
}
pub struct Ctx<'a, F> {
map: &'a Map,
// The node in which we are currently mapping (an item or a method).
// When equal to DUMMY_NODE_ID, the next mapped node becomes the parent.
parent: NodeId,
fold_ops: F
}
impl<'a, F> Ctx<'a, F> {
fn insert(&self, id: NodeId, entry: MapEntry) {
let mut map = self.map.map.borrow_mut();
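// grow_set pads the vector with NotPresent entries up to `id` before writing the entry.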
map.get().grow_set(id as uint, &NotPresent, entry);
}
}
impl<'a, F: FoldOps> Folder for Ctx<'a, F> {
fn new_id(&mut self, id: NodeId) -> NodeId {
let id = self.fold_ops.new_id(id);
if self.parent == DUMMY_NODE_ID {
self.parent = id;
}
id
}
fn new_span(&mut self, span: Span) -> Span {
self.fold_ops.new_span(span)
}
fn fold_item(&mut self, i: @Item) -> SmallVector<@Item> {
let parent = self.parent;
self.parent = DUMMY_NODE_ID;
let i = fold::noop_fold_item(i, self).expect_one("expected one item");
assert_eq!(self.parent, i.id);
match i.node {
ItemImpl(_, _, _, ref ms) => {
for &m in ms.iter() {
self.insert(m.id, EntryMethod(self.parent, m));
}
}
ItemEnum(ref enum_definition, _) => {
for &v in enum_definition.variants.iter() {
self.insert(v.node.id, EntryVariant(self.parent, v));
}
}
ItemForeignMod(ref nm) => {
for &nitem in nm.items.iter() {
self.insert(nitem.id, EntryForeignItem(self.parent, nitem));
}
}
ItemStruct(struct_def, _) => {
// If this is a tuple-like struct, register the constructor.
match struct_def.ctor_id {
Some(ctor_id) => {
self.insert(ctor_id, EntryStructCtor(self.parent,
struct_def));
}
None => {}
}
}
ItemTrait(_, ref traits, ref methods) => {
for t in traits.iter() {
self.insert(t.ref_id, EntryItem(self.parent, i));
}
for tm in methods.iter() {
match *tm {
Required(ref m) => {
self.insert(m.id, EntryTraitMethod(self.parent,
@(*tm).clone()));
}
Provided(m) => {
self.insert(m.id, EntryTraitMethod(self.parent,
@Provided(m)));
}
}
}
}
_ => {}
}
self.parent = parent;
self.insert(i.id, EntryItem(self.parent, i));
SmallVector::one(i)
}
fn fold_pat(&mut self, pat: @Pat) -> @Pat {
let pat = fold::noop_fold_pat(pat, self);
match pat.node {
PatIdent(..) => {
// Note: this is at least *potentially* a pattern...
self.insert(pat.id, EntryLocal(self.parent, pat));
}
_ => {}
}
pat
}
fn fold_expr(&mut self, expr: @Expr) -> @Expr {
let expr = fold::noop_fold_expr(expr, self);
self.insert(expr.id, EntryExpr(self.parent, expr));
expr
}
fn fold_stmt(&mut self, stmt: &Stmt) -> SmallVector<@Stmt> {
let stmt = fold::noop_fold_stmt(stmt, self).expect_one("expected one statement");
self.insert(ast_util::stmt_id(stmt), EntryStmt(self.parent, stmt));
SmallVector::one(stmt)
}
fn fold_method(&mut self, m: @Method) -> @Method {
let parent = self.parent;
self.parent = DUMMY_NODE_ID;
let m = fold::noop_fold_method(m, self);
assert_eq!(self.parent, m.id);
self.parent = parent;
m
}
fn fold_fn_decl(&mut self, decl: &FnDecl) -> P<FnDecl> {
let decl = fold::noop_fold_fn_decl(decl, self);
for a in decl.inputs.iter() {
self.insert(a.id, EntryArg(self.parent, a.pat));
}
decl
}
fn fold_block(&mut self, block: P<Block>) -> P<Block> {
let block = fold::noop_fold_block(block, self);
self.insert(block.id, EntryBlock(self.parent, block));
block
}
}
pub fn map_crate<F: FoldOps>(krate: Crate, fold_ops: F) -> (Crate, Map) {
let map = Map { map: RefCell::new(Vec::new()) };
let krate = {
let mut cx = Ctx {
map: &map,
parent: CRATE_NODE_ID,
fold_ops: fold_ops
};
cx.insert(CRATE_NODE_ID, RootCrate);
cx.fold_crate(krate)
};
if log_enabled!(::log::DEBUG) {
let map = map.map.borrow();
// This only makes sense for ordered stores; note the
// enumerate to count the number of entries.
let (entries_less_1, _) = map.get().iter().filter(|&x| {
match *x {
NotPresent => false,
_ => true
}
}).enumerate().last().expect("AST map was empty after folding?");
let entries = entries_less_1 + 1;
let vector_length = map.get().len();
debug!("The AST map has {} entries with a maximum of {}: occupancy {:.1}%",
entries, vector_length, (entries as f64 / vector_length as f64) * 100.);
}
(krate, map)
}
// Used for items loaded from an external crate that are being inlined into this
// crate. The `path` should be the path to the item but should not include
// the item itself.
pub fn map_decoded_item<F: FoldOps>(map: &Map,
path: Vec<PathElem> ,
fold_ops: F,
fold: |&mut Ctx<F>| -> InlinedItem)
-> InlinedItem {
let mut cx = Ctx {
map: map,
parent: DUMMY_NODE_ID,
fold_ops: fold_ops
};
// Generate a NodeId for the RootInlinedParent inserted below.
cx.new_id(DUMMY_NODE_ID);
// Methods get added to the AST map when their impl is visited. Since we
// don't decode and instantiate the impl, but just the method, we have to
// add it to the table now. Likewise with foreign items.
let mut def_id = DefId { krate: LOCAL_CRATE, node: DUMMY_NODE_ID };
let ii = fold(&mut cx);
match ii {
IIItem(_) => {}
IIMethod(impl_did, is_provided, m) => {
let entry = if is_provided {
EntryTraitMethod(cx.parent, @Provided(m))
} else {
EntryMethod(cx.parent, m)
};
cx.insert(m.id, entry);
def_id = impl_did;
}
IIForeign(i) => {
cx.insert(i.id, EntryForeignItem(cx.parent, i));
}
}
cx.insert(cx.parent, RootInlinedParent(P(InlinedParent {
path: path,
def_id: def_id
})));
ii
}
fn node_id_to_str(map: &Map, id: NodeId) -> ~str {
match map.find(id) {
Some(NodeItem(item)) => {
let path_str = map.path_to_str_with_ident(id, item.ident);
let item_str = match item.node {
ItemStatic(..) => "static",
ItemFn(..) => "fn",
ItemMod(..) => "mod",
ItemForeignMod(..) => "foreign mod",
ItemTy(..) => "ty",
ItemEnum(..) => "enum",
ItemStruct(..) => "struct",
ItemTrait(..) => "trait",
ItemImpl(..) => "impl",
ItemMac(..) => "macro"
};
format!("{} {} (id={})", item_str, path_str, id)
}
Some(NodeForeignItem(item)) => {
let path_str = map.path_to_str_with_ident(id, item.ident);
format!("foreign item {} (id={})", path_str, id)
}
Some(NodeMethod(m)) => {
format!("method {} in {} (id={})",
token::get_ident(m.ident),
map.path_to_str(id), id)
}
Some(NodeTraitMethod(ref tm)) => {
let m = ast_util::trait_method_to_ty_method(&**tm);
format!("method {} in {} (id={})",
token::get_ident(m.ident),
map.path_to_str(id), id)
}
Some(NodeVariant(ref variant)) => {
format!("variant {} in {} (id={})",
token::get_ident(variant.node.name),
map.path_to_str(id), id)
}
Some(NodeExpr(expr)) => {
format!("expr {} (id={})", pprust::expr_to_str(expr), id)
}
Some(NodeStmt(stmt)) => {
format!("stmt {} (id={})", pprust::stmt_to_str(stmt), id)
}
Some(NodeArg(pat)) => {
format!("arg {} (id={})", pprust::pat_to_str(pat), id) | Some(NodeBlock(block)) => {
format!("block {} (id={})", pprust::block_to_str(block), id)
}
Some(NodeStructCtor(_)) => {
format!("struct_ctor {} (id={})", map.path_to_str(id), id)
}
None => {
format!("unknown node (id={})", id)
}
}
} | }
Some(NodeLocal(pat)) => {
format!("local {} (id={})", pprust::pat_to_str(pat), id)
} |
mongo.go | package mongo
import (
"context"
"errors"
"fmt"
"net"
"reflect"
"regexp"
"strings"
"sync"
"time"
"unsafe"
"github.com/DataDog/datadog-go/statsd"
"go.mongodb.org/mongo-driver/event"
"go.mongodb.org/mongo-driver/mongo"
"go.mongodb.org/mongo-driver/mongo/address"
"go.mongodb.org/mongo-driver/mongo/description"
"go.mongodb.org/mongo-driver/mongo/options"
"go.mongodb.org/mongo-driver/mongo/readpref"
"go.mongodb.org/mongo-driver/x/mongo/driver"
"go.mongodb.org/mongo-driver/x/mongo/driver/topology"
"go.uber.org/zap"
"github.com/coinbase/mongobetween/util"
)
const pingTimeout = 60 * time.Second
const disconnectTimeout = 10 * time.Second
type Mongo struct {
log *zap.Logger
statsd *statsd.Client
opts *options.ClientOptions
mu sync.RWMutex
client *mongo.Client
topology *topology.Topology
cursors *cursorCache
roundTripCtx context.Context
roundTripCancel func()
}
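// extractTopology reads the client's private "deployment" field via reflection and unsafe to reach the underlying topology.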
func extractTopology(c *mongo.Client) *topology.Topology {
e := reflect.ValueOf(c).Elem()
d := e.FieldByName("deployment")
d = reflect.NewAt(d.Type(), unsafe.Pointer(d.UnsafeAddr())).Elem() // #nosec G103
return d.Interface().(*topology.Topology)
}
func Connect(log *zap.Logger, sd *statsd.Client, opts *options.ClientOptions, ping bool) (*Mongo, error) {
// timeout shouldn't be hit if ping == false, as Connect doesn't block the current goroutine
ctx, cancel := context.WithTimeout(context.Background(), pingTimeout)
defer cancel()
opts = opts.SetPoolMonitor(poolMonitor(sd))
var err error
log.Info("Connect")
c, err := mongo.Connect(ctx, opts)
if err != nil {
return nil, err
}
if ping {
log.Info("Ping")
err = c.Ping(ctx, readpref.Primary())
if err != nil {
return nil, err
}
log.Info("Pong")
}
t := extractTopology(c)
go topologyMonitor(log, t)
rtCtx, rtCancel := context.WithCancel(context.Background())
m := Mongo{
log: log,
statsd: sd,
opts: opts,
client: c,
topology: t,
cursors: newCursorCache(),
roundTripCtx: rtCtx,
roundTripCancel: rtCancel,
}
go m.cursorMonitor()
return &m, nil
}
func poolMonitor(sd *statsd.Client) *event.PoolMonitor {
checkedOut, checkedIn := util.StatsdBackgroundGauge(sd, "pool.checked_out_connections", []string{})
opened, closed := util.StatsdBackgroundGauge(sd, "pool.open_connections", []string{})
return &event.PoolMonitor{
Event: func(e *event.PoolEvent) {
snake := strings.ToLower(regexp.MustCompile("([a-z0-9])([A-Z])").ReplaceAllString(e.Type, "${1}_${2}"))
name := fmt.Sprintf("pool_event.%s", snake)
tags := []string{
fmt.Sprintf("address:%s", e.Address),
fmt.Sprintf("reason:%s", e.Reason),
}
switch e.Type {
case event.ConnectionCreated:
opened(name, tags)
case event.ConnectionClosed:
closed(name, tags)
case event.GetSucceeded:
checkedOut(name, tags)
case event.ConnectionReturned:
checkedIn(name, tags)
default:
_ = sd.Incr(name, tags, 1)
}
},
}
}
func (m *Mongo) Description() description.Topology {
return m.topology.Description()
}
func (m *Mongo) cursorMonitor() {
for {
_ = m.statsd.Gauge("cursors", float64(m.cursors.count()), []string{}, 1)
time.Sleep(1 * time.Second)
}
}
func (m *Mongo) Close() {
m.mu.Lock()
defer m.mu.Unlock()
if m.client == nil {
// already closed
return
}
m.roundTripCancel()
m.log.Info("Disconnect")
ctx, cancel := context.WithTimeout(context.Background(), disconnectTimeout)
defer cancel()
err := m.client.Disconnect(ctx)
m.client = nil
if err != nil {
m.log.Info("Error disconnecting", zap.Error(err))
}
}
func (m *Mongo) RoundTrip(msg *Message, tags []string) (_ *Message, err error) {
m.mu.RLock()
defer m.mu.RUnlock()
var addr address.Address
defer func() {
if err != nil {
cursorID, _ := msg.Op.CursorID()
command, collection := msg.Op.CommandAndCollection()
m.log.Error(
"Round trip error",
zap.Error(err),
zap.Int64("cursor_id", cursorID),
zap.Int32("op_code", int32(msg.Op.OpCode())),
zap.String("address", addr.String()),
zap.String("command", string(command)),
zap.String("collection", collection),
)
}
}()
if m.client == nil {
return nil, errors.New("connection closed")
}
// FIXME this assumes that cursorIDs are unique on the cluster, but two servers can have the same cursorID referencing different cursors
requestCursorID, _ := msg.Op.CursorID()
server, err := m.selectServer(requestCursorID)
if err != nil {
return nil, err
}
// FIXME transactions should be pinned to servers, similar to cursors above
conn, err := m.checkoutConnection(server)
if err != nil {
return nil, err
}
addr = conn.Address()
tags = append(
tags,
fmt.Sprintf("address:%s", conn.Address().String()),
)
defer func() {
err := conn.Close()
if err != nil {
m.log.Error("Error closing Mongo connection", zap.Error(err), zap.String("address", addr.String()))
}
}()
// see https://github.com/mongodb/mongo-go-driver/blob/v1.7.2/x/mongo/driver/operation.go#L430-L432
ep, ok := server.(driver.ErrorProcessor)
if !ok {
return nil, errors.New("server ErrorProcessor type assertion failed")
}
unacknowledged := msg.Op.Unacknowledged()
wm, err := m.roundTrip(conn, msg.Wm, unacknowledged, tags)
if err != nil {
m.processError(err, ep, addr, conn)
return nil, err
}
if unacknowledged {
return &Message{}, nil
}
op, err := Decode(wm)
if err != nil {
return nil, err
}
// check if an error is returned in the server response
opErr := op.Error()
if opErr != nil {
// process the error, but don't return it as we still want to forward the response to the client
m.processError(opErr, ep, addr, conn)
}
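// Track cursors by ID so follow-up requests for the same cursor are routed to the same server.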
if responseCursorID, ok := op.CursorID(); ok {
if responseCursorID != 0 {
m.cursors.add(responseCursorID, server)
} else if requestCursorID != 0 {
m.cursors.remove(requestCursorID)
}
}
return &Message{
Wm: wm,
Op: op,
}, nil
}
func (m *Mongo) selectServer(requestCursorID int64) (server driver.Server, err error) {
defer func(start time.Time) {
_ = m.statsd.Timing("server_selection", time.Since(start), []string{fmt.Sprintf("success:%v", err == nil)}, 1)
}(time.Now())
if requestCursorID != 0 {
server, ok := m.cursors.peek(requestCursorID)
if ok {
return server, nil
}
}
selector := description.CompositeSelector([]description.ServerSelector{
description.ReadPrefSelector(readpref.Primary()), // ignored by sharded clusters
description.LatencySelector(15 * time.Millisecond), // default localThreshold for the client
})
return m.topology.SelectServer(m.roundTripCtx, selector)
}
func (m *Mongo) checkoutConnection(server driver.Server) (conn driver.Connection, err error) {
defer func(start time.Time) {
addr := ""
if conn != nil {
addr = conn.Address().String()
}
_ = m.statsd.Timing("checkout_connection", time.Since(start), []string{
fmt.Sprintf("address:%s", addr),
fmt.Sprintf("success:%v", err == nil),
}, 1)
}(time.Now())
conn, err = server.Connection(m.roundTripCtx)
if err != nil {
return nil, err
}
return conn, nil
}
// see https://github.com/mongodb/mongo-go-driver/blob/v1.7.2/x/mongo/driver/operation.go#L664-L681
func (m *Mongo) roundTrip(conn driver.Connection, req []byte, unacknowledged bool, tags []string) (res []byte, err error) {
defer func(start time.Time) {
tags = append(tags, fmt.Sprintf("success:%v", err == nil))
_ = m.statsd.Distribution("request_size", float64(len(req)), tags, 1)
if err == nil && !unacknowledged {
// There is no response size for unacknowledged writes.
_ = m.statsd.Distribution("response_size", float64(len(res)), tags, 1)
}
_ = m.statsd.Timing("round_trip", time.Since(start), tags, 1)
}(time.Now())
if err = conn.WriteWireMessage(m.roundTripCtx, req); err != nil {
return nil, wrapNetworkError(err)
}
if unacknowledged {
return nil, nil
}
if res, err = conn.ReadWireMessage(m.roundTripCtx, req[:0]); err != nil {
return nil, wrapNetworkError(err)
}
return res, nil
}
func wrapNetworkError(err error) error |
// processError handles the error with the given ErrorProcessor and logs when the error changes the topology
func (m *Mongo) processError(err error, ep driver.ErrorProcessor, addr address.Address, conn driver.Connection) {
last := m.Description()
// gather fields for logging
fields := []zap.Field{
zap.String("address", addr.String()),
zap.Error(err),
}
if derr, ok := err.(driver.Error); ok {
fields = append(fields, zap.Int32("error_code", derr.Code))
fields = append(fields, zap.Strings("error_labels", derr.Labels))
fields = append(fields, zap.NamedError("error_wrapped", derr.Wrapped))
}
if werr, ok := err.(driver.WriteConcernError); ok {
fields = append(fields, zap.Int64("error_code", werr.Code))
}
// process the error
ep.ProcessError(err, conn)
// log if the error changed the topology
if errorChangesTopology(err) {
desc := m.Description()
fields = append(fields, topologyChangedFields(&last, &desc)...)
m.log.Error("Topology changing error", fields...)
}
}
// see https://github.com/mongodb/mongo-go-driver/blob/v1.7.2/x/mongo/driver/topology/server.go#L432-L505
func errorChangesTopology(err error) bool {
if cerr, ok := err.(driver.Error); ok && (cerr.NodeIsRecovering() || cerr.NotMaster()) {
return true
}
if wcerr, ok := err.(driver.WriteConcernError); ok && (wcerr.NodeIsRecovering() || wcerr.NotMaster()) {
return true
}
wrappedConnErr := unwrapConnectionError(err)
if wrappedConnErr == nil {
return false
}
// Ignore transient timeout errors.
if netErr, ok := wrappedConnErr.(net.Error); ok && netErr.Timeout() {
return false
}
if wrappedConnErr == context.Canceled || wrappedConnErr == context.DeadlineExceeded {
return false
}
return true
}
// see https://github.com/mongodb/mongo-go-driver/blob/v1.7.2/x/mongo/driver/topology/server.go#L949-L969
func unwrapConnectionError(err error) error {
connErr, ok := err.(topology.ConnectionError)
if ok {
return connErr.Wrapped
}
driverErr, ok := err.(driver.Error)
if !ok || !driverErr.NetworkError() {
return nil
}
connErr, ok = driverErr.Wrapped.(topology.ConnectionError)
if ok {
return connErr.Wrapped
}
return nil
}
| {
labels := []string{driver.NetworkError}
return driver.Error{Message: err.Error(), Labels: labels, Wrapped: err}
} |
consumers.py | # chat/consumers.py
from channels.generic.websocket import AsyncWebsocketConsumer
import json
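# Relays chat messages between a WebSocket connection and its room group.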
class ChatConsumer(AsyncWebsocketConsumer):
async def connect(self):
self.room_name = self.scope['url_route']['kwargs']['room_name']
self.room_group_name = 'chat_%s' % self.room_name
# Join room group
await self.channel_layer.group_add(
self.room_group_name,
self.channel_name
)
await self.accept()
async def disconnect(self, close_code):
# Leave room group
await self.channel_layer.group_discard(
self.room_group_name,
self.channel_name
)
# Receive message from WebSocket
async def receive(self, text_data):
text_data_json = json.loads(text_data)
message = text_data_json['message']
# Send message to room group
await self.channel_layer.group_send(
self.room_group_name,
{
'type': 'chat_message', | 'message': message
}
)
# Receive message from room group
async def chat_message(self, event):
message = event['message']
# Send message to WebSocket
await self.send(text_data=json.dumps({
'message': message
})) | |
exception.py | import click
from colorama import Fore, Style
class KsmCliException(click.ClickException):
in_a_shell = False
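# Class-level flag: set to True inside the interactive shell so messages are colorized.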
def colorize(self):
if KsmCliException.in_a_shell is False:
return str(self.message)
else: | return self.colorize()
def __str__(self):
return self.colorize()
class KsmRecordSyntaxException(Exception):
pass | return Fore.RED + str(self.message) + Style.RESET_ALL
def format_message(self): |
syntax_tree.rs | /*
* This file is part of the uutils coreutils package.
*
* (c) Roman Gafiyatullin <[email protected]>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
//!
//! Here we employ the shunting-yard algorithm to build an AST from tokens according to the operators' precedence and associativity.
//! * https://en.wikipedia.org/wiki/Shunting-yard_algorithm
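//! For example, the tokens for `1 + 2 * 3` are reordered into the RPN `1 2 3 * +`, from which the AST is then built.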
//!
use tokens::{Token};
type TokenStack = Vec<(usize, Token)>;
pub type OperandsList = Vec< Box<ASTNode> >;
#[derive(Debug)]
pub enum ASTNode {
Leaf { token_idx: usize, value: String },
Node { token_idx: usize, op_type: String, operands: OperandsList }
}
impl ASTNode {
fn debug_dump( &self ) {
self.debug_dump_impl( 1 );
}
fn debug_dump_impl( &self, depth: usize ) {
for _ in 0..depth {
print!("\t", );
}
match self {
&ASTNode::Leaf{ ref token_idx, ref value } => println!("Leaf( {} ) at #{} ( evaluate -> {:?} )", value, token_idx, self.evaluate()),
&ASTNode::Node{ ref token_idx, ref op_type, ref operands } => {
println!("Node( {} ) at #{} (evaluate -> {:?})", op_type, token_idx, self.evaluate());
for operand in operands {
operand.debug_dump_impl( depth + 1 );
}
}
}
}
fn new_node( token_idx: usize, op_type: &String, operands: OperandsList ) -> Box<ASTNode> {
Box::new( ASTNode::Node{
token_idx: token_idx,
op_type: op_type.clone(),
operands: operands
} )
}
fn new_leaf( token_idx: usize, value: &String ) -> Box<ASTNode> {
Box::new( ASTNode::Leaf{ token_idx: token_idx, value: value.clone() } )
}
pub fn evaluate( &self ) -> Result<String, String> {
match self {
&ASTNode::Leaf{ ref value, .. } => Ok( value.clone() ),
&ASTNode::Node{ ref op_type, .. } =>
match self.operand_values() {
Err( reason ) => Err( reason ),
Ok( operand_values ) =>
match op_type.as_ref() {
"+" => infix_operator_two_ints( |a: i64, b: i64| Ok( a + b ), &operand_values ),
"-" => infix_operator_two_ints( |a: i64, b: i64| Ok( a - b ), &operand_values ),
"*" => infix_operator_two_ints( |a: i64, b: i64| Ok( a * b ), &operand_values ),
"/" => infix_operator_two_ints(
|a: i64, b: i64|
if b == 0 { Err("division by zero".to_string()) }
else { Ok( a / b ) },
&operand_values ),
"%" => infix_operator_two_ints(
|a: i64, b: i64|
if b == 0 { Err("division by zero".to_string()) }
else { Ok( a % b ) },
&operand_values ),
"=" => infix_operator_two_ints_or_two_strings(
|a: i64, b: i64| Ok( bool_as_int(a == b) ),
|a: &String, b: &String| Ok( bool_as_string(a == b) ),
&operand_values
),
"!=" => infix_operator_two_ints_or_two_strings(
|a: i64, b: i64| Ok( bool_as_int(a != b) ),
|a: &String, b: &String| Ok( bool_as_string(a != b) ),
&operand_values
),
"<" => infix_operator_two_ints_or_two_strings(
|a: i64, b: i64| Ok( bool_as_int(a < b) ),
|a: &String, b: &String| Ok( bool_as_string(a < b) ),
&operand_values
),
">" => infix_operator_two_ints_or_two_strings(
|a: i64, b: i64| Ok( bool_as_int(a > b) ),
|a: &String, b: &String| Ok( bool_as_string(a > b) ),
&operand_values
),
"<=" => infix_operator_two_ints_or_two_strings(
|a: i64, b: i64| Ok( bool_as_int(a <= b) ),
|a: &String, b: &String| Ok( bool_as_string(a <= b) ),
&operand_values
),
">=" => infix_operator_two_ints_or_two_strings(
|a: i64, b: i64| Ok( bool_as_int(a >= b) ),
|a: &String, b: &String| Ok( bool_as_string(a >= b) ),
&operand_values
),
"|" => infix_operator_or(&operand_values),
"&" => infix_operator_and(&operand_values),
"length" => prefix_operator_length( &operand_values ),
"index" => prefix_operator_index( &operand_values ),
"substr" => prefix_operator_substr( &operand_values ),
_ => Err(format!("operation not implemented: {}", op_type))
}
}
}
}
pub fn operand_values( &self ) -> Result<Vec<String>, String> {
if let &ASTNode::Node{ ref operands, .. } = self {
let mut out = Vec::with_capacity( operands.len() );
for operand in operands {
match operand.evaluate() {
Ok( value ) => out.push( value ),
Err( reason ) => return Err( reason ),
}
}
Ok( out )
}
else { panic!("Invoked .operand_values(&self) not with ASTNode::Node") }
}
}
pub fn tokens_to_ast( maybe_tokens: Result< Vec<(usize, Token)>, String > ) -> Result<Box<ASTNode>, String> {
if maybe_tokens.is_err() { Err( maybe_tokens.err().unwrap() ) }
else {
let tokens = maybe_tokens.ok().unwrap();
let mut out_stack: TokenStack = Vec::new();
let mut op_stack: TokenStack = Vec::new();
for (token_idx, token) in tokens {
if let Err( reason ) = push_token_to_either_stack( token_idx, &token, &mut out_stack, &mut op_stack ) {
return Err( reason )
}
}
if let Err( reason ) = move_rest_of_ops_to_out( &mut out_stack, &mut op_stack ) {
return Err( reason )
}
assert!( op_stack.is_empty() );
maybe_dump_rpn( &out_stack );
let result = ast_from_rpn( &mut out_stack );
if !out_stack.is_empty() {
Err( "syntax error (fist RPN token does not represent expression AST's root)".to_string() )
}
else {
maybe_dump_ast( &result );
result
}
}
}
fn maybe_dump_ast( result: &Result< Box<ASTNode>, String > ) {
use std::env;
if let Ok( debug_var ) = env::var( "EXPR_DEBUG_AST" ) {
if debug_var == "1" {
println!("EXPR_DEBUG_AST");
match result {
&Ok( ref ast ) => ast.debug_dump(),
&Err( ref reason ) => println!("\terr: {:?}", reason),
}
}
}
}
fn maybe_dump_rpn( rpn: &TokenStack ) {
use std::env;
if let Ok( debug_var ) = env::var( "EXPR_DEBUG_RPN" ) {
if debug_var == "1" {
println!("EXPR_DEBUG_RPN");
for token in rpn {
println!("\t{:?}", token);
}
}
}
}
fn ast_from_rpn( rpn: &mut TokenStack ) -> Result<Box<ASTNode>, String> {
match rpn.pop() {
None => Err( "syntax error (premature end of expression)".to_string() ),
Some( (token_idx, Token::Value{ value }) ) =>
Ok( ASTNode::new_leaf( token_idx, &value ) ),
Some( (token_idx, Token::InfixOp{ value, .. }) ) =>
maybe_ast_node( token_idx, &value, 2, rpn ),
Some( (token_idx, Token::PrefixOp{ value, arity }) ) =>
maybe_ast_node( token_idx, &value, arity, rpn ),
Some( (token_idx, unexpected_token) ) =>
panic!("unexpected token at #{} {:?}", token_idx, unexpected_token),
}
}
fn maybe_ast_node( token_idx: usize, op_type: &String, arity: usize, rpn: &mut TokenStack ) -> Result< Box<ASTNode>, String > {
let mut operands = Vec::with_capacity( arity );
for _ in 0..arity {
match ast_from_rpn( rpn ) {
Err( reason ) => return Err( reason ),
Ok( operand ) => operands.push( operand ),
}
}
operands.reverse();
Ok( ASTNode::new_node( token_idx, op_type, operands ) )
}
fn move_rest_of_ops_to_out( out_stack: &mut TokenStack, op_stack: &mut TokenStack ) -> Result<(), String> {
loop {
match op_stack.pop() {
None => return Ok( () ),
Some( (token_idx, Token::ParOpen) ) => return Err( format!( "syntax error (Mismatched open-parenthesis at #{})", token_idx ) ),
Some( (token_idx, Token::ParClose) ) => return Err( format!( "syntax error (Mismatched close-parenthesis at #{})", token_idx ) ),
Some( other ) => out_stack.push( other )
}
}
}
fn push_token_to_either_stack( token_idx: usize, token: &Token, out_stack: &mut TokenStack, op_stack: &mut TokenStack ) -> Result<(), String> {
let result =
match token {
&Token::Value{ .. } => Ok( out_stack.push( (token_idx, token.clone()) ) ),
&Token::InfixOp{ .. } =>
if op_stack.is_empty() { Ok( op_stack.push( (token_idx, token.clone()) ) ) }
else { push_op_to_stack( token_idx, token, out_stack, op_stack ) },
&Token::PrefixOp{ .. } => Ok( op_stack.push( (token_idx, token.clone()) ) ),
&Token::ParOpen => Ok( op_stack.push( (token_idx, token.clone()) ) ),
&Token::ParClose => move_till_match_paren( out_stack, op_stack )
};
maybe_dump_shunting_yard_step( token_idx, token, out_stack, op_stack, &result );
result
}
fn maybe_dump_shunting_yard_step( token_idx: usize, token: &Token, out_stack: &TokenStack, op_stack: &TokenStack, result: &Result<(), String> ) {
use std::env;
if let Ok( debug_var ) = env::var( "EXPR_DEBUG_SYA_STEP" ) {
if debug_var == "1" {
println!("EXPR_DEBUG_SYA_STEP");
println!("\t{} => {:?}", token_idx, token);
println!("\t\tout: {:?}", out_stack);
println!("\t\top : {:?}", op_stack);
println!("\t\tresult: {:?}", result);
}
}
}
fn push_op_to_stack( token_idx: usize, token: &Token, out_stack: &mut TokenStack, op_stack: &mut TokenStack ) -> Result<(), String> {
if let &Token::InfixOp{ precedence: prec, left_assoc: la, .. } = token {
loop {
match op_stack.last() {
None =>
return Ok( op_stack.push( (token_idx, token.clone()) ) ),
Some( &(_, Token::ParOpen) ) =>
return Ok( op_stack.push( (token_idx, token.clone()) ) ),
Some( &(_, Token::InfixOp{ precedence: prev_prec, .. }) ) =>
if la && prev_prec >= prec
|| !la && prev_prec > prec {
out_stack.push( op_stack.pop().unwrap() )
}
else {
return Ok( op_stack.push( (token_idx, token.clone()) ) )
},
Some( &(_, Token::PrefixOp{ .. }) ) =>
return Ok( op_stack.push( (token_idx, token.clone()) ) ),
Some( _ ) => panic!("Non-operator on op_stack")
}
}
}
else {
panic!("Expected infix-op")
}
}
fn move_till_match_paren( out_stack: &mut TokenStack, op_stack: &mut TokenStack ) -> Result<(), String> {
loop {
match op_stack.pop() {
None => return Err( "syntax error (Mismatched close-parenthesis)".to_string() ),
Some( (_, Token::ParOpen) ) => return Ok( () ),
Some( other ) => out_stack.push( other )
}
}
}
fn infix_operator_two_ints<F>( f: F, values: &Vec<String> ) -> Result<String, String>
where F : Fn( i64, i64 ) -> Result<i64, String>
{
assert!( values.len() == 2 );
if let Some( left ) = values[0].parse::<i64>().ok() {
if let Some( right ) = values[1].parse::<i64>().ok() {
return match f( left, right ) {
Ok(result) => Ok(result.to_string()),
Err(reason) => Err(reason),
}
}
}
Err( "Expected an integer operand".to_string() )
}
fn infix_operator_two_ints_or_two_strings<FI, FS>( fi: FI, fs: FS, values: &Vec<String> ) -> Result<String, String>
where FI : Fn( i64, i64 ) -> Result<i64, String>,
FS : Fn( &String, &String ) -> Result<String, String>
{
assert!( values.len() == 2 );
if let ( Some( a_int ), Some( b_int ) ) =
(
values[0].parse::<i64>().ok(),
values[1].parse::<i64>().ok()
) {
match fi( a_int, b_int ) {
Ok( result ) => Ok(result.to_string()),
Err( reason ) => Err(reason)
}
}
else {
fs( &values[0], &values[1] )
}
}
fn infix_operator_or( values: &Vec<String> ) -> Result<String, String> {
assert!(values.len() == 2);
if value_as_bool(&values[0]) {
Ok(values[0].clone())
} else {
Ok(values[1].clone())
}
}
fn infix_operator_and( values: &Vec<String> ) -> Result<String, String> {
assert!( values.len() == 2 );
if value_as_bool(&values[0]) && value_as_bool(&values[1]) {
Ok(values[0].clone())
} else {
Ok(0.to_string())
}
}
fn prefix_operator_length( values: &Vec<String> ) -> Result<String, String> {
assert!( values.len() == 1 );
Ok( values[0].len().to_string() )
}
fn prefix_operator_index( values: &Vec<String> ) -> Result<String, String> {
assert!( values.len() == 2 );
let haystack = &values[0];
let needles = &values[1];
let mut current_idx = 0;
for ch_h in haystack.chars() {
current_idx += 1;
for ch_n in needles.chars() {
if ch_n == ch_h {
return Ok( current_idx.to_string() )
}
}
}
Ok( "0".to_string() )
}
fn | ( values: &Vec<String> ) -> Result<String, String> {
assert!( values.len() == 3 );
let subj = &values[0];
let mut idx = match values[1].parse::<i64>() {
Ok( i ) => i,
Err( _ ) => return Err( "expected integer as POS arg to 'substr'".to_string() ),
};
let mut len = match values[2].parse::<i64>() {
Ok( i ) => i,
Err( _ ) => return Err( "expected integer as LENGTH arg to 'substr'".to_string() ),
};
if idx <= 0 || len <= 0 { return Ok( "".to_string() ) }
let mut out_str = String::new();
for ch in subj.chars() {
idx -= 1;
if idx <= 0 {
if len <= 0 { break; }
len -= 1;
out_str.push( ch );
}
}
Ok( out_str )
}
fn bool_as_int( b: bool ) -> i64 { if b { 1 } else { 0 } }
fn bool_as_string( b: bool ) -> String { if b { "1".to_string() } else { "0".to_string() } }
fn value_as_bool( s: &str ) -> bool {
if s.len() == 0 {
return false
}
match s.parse::<i64>() {
Ok(n) => n != 0,
Err(_) => true,
}
}
| prefix_operator_substr |
lib.rs | #[derive(Debug)]
pub struct | {
length: u32,
width: u32,
}
impl Rectangle {
pub fn can_hold(&self, other: &Rectangle) -> bool {
self.length > other.length && self.width > other.width
}
}
#[cfg(test)]
mod tests {
#[test]
fn it_works() { assert_eq!(2 + 2, 4); }
#[test]
fn another() { panic!("Error!"); }
use super::*;
#[test]
fn larger_can_hold_smaller() {
let larger = Rectangle { length: 8, width: 7 };
let smaller = Rectangle { length: 5, width: 1 };
assert!(larger.can_hold(&smaller));
}
#[test]
fn smaller_cannot_hold_larger() {
let larger = Rectangle { length: 8, width: 7 };
let smaller = Rectangle { length: 5, width: 1 };
assert!(!smaller.can_hold(&larger));
}
}
| Rectangle |
web.console.module.ts | import {DynamicModule, HttpModule, Module} from '@nestjs/common';
import _ from 'lodash';
import {ConsoleModuleAsyncOptions, ConsoleModuleOptions, DEFAULT_OPTIONS} from "./console.types";
import {WebConsoleControllerFactory} from "./controllers/web.console.controller";
import {ALL_COMMANDS} from "./services/all.commands";
import {RemoteConsoleService} from "./services/remote.console.service";
import {TempFileService} from "./services/temp.file.service";
import {WebConsoleService} from "./services/web.console.service";
@Module({})
export class WebConsoleModule {
static forRootAsync(options: ConsoleModuleAsyncOptions): DynamicModule { | imports: [
...!imports ? [] : imports,
HttpModule
],
exports: [
WebConsoleService,
TempFileService,
...ALL_COMMANDS,
],
module: WebConsoleModule,
providers: [
WebConsoleService,
RemoteConsoleService,
TempFileService,
{
provide: 'CONFIG_ROOT_OPTIONS',
useFactory: async (
...args: any[]
) => {
const options = await useFactory(...args);
return _.merge(DEFAULT_OPTIONS, {...options, endpoint});
},
inject: !inject ? [] : [...inject]
},
...ALL_COMMANDS,
]
}
}
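    // Hedged usage sketch; names other than WebConsoleModule (e.g. the
    // ConfigService shown here) are illustrative assumptions, not part of this file:
    //
    //   @Module({
    //     imports: [
    //       WebConsoleModule.forRootAsync({
    //         endpoint: '/console',
    //         useFactory: (config: ConfigService) => config.get('console'),
    //         inject: [ConfigService],
    //       }),
    //     ],
    //   })
    //   export class AppModule {}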
static forRoot(options: ConsoleModuleOptions): DynamicModule {
const {imports = [], commands = [], endpoint} = options;
        // merge into a fresh object so the shared DEFAULT_OPTIONS is not mutated
        const consoleOptions = _.merge({}, DEFAULT_OPTIONS, options.options);
ALL_COMMANDS.push(...commands);
return {
controllers: [WebConsoleControllerFactory(endpoint)],
imports: [
...!imports ? [] : imports,
HttpModule
],
exports: [
WebConsoleService,
TempFileService,
...ALL_COMMANDS
],
module: WebConsoleModule,
providers: [
WebConsoleService,
RemoteConsoleService,
TempFileService,
{
provide: 'CONFIG_ROOT_OPTIONS',
useValue: {...consoleOptions, endpoint},
},
...ALL_COMMANDS,
]
}
}
} | const {imports = [], commands = [], useFactory, inject, endpoint} = options;
ALL_COMMANDS.push(...commands);
return {
controllers: [WebConsoleControllerFactory(endpoint)], |
runtime.rs | use scoped_tls::scoped_thread_local;
use crate::driver::Driver;
use crate::scheduler::{LocalScheduler, TaskQueue};
// use crate::task::task_impl::spawn_local;
use crate::task::waker_fn::{dummy_waker, set_poll, should_poll};
use crate::task::{new_task, JoinHandle};
use crate::time::driver::Handle as TimeHandle;
use std::future::Future;
scoped_thread_local!(pub(crate) static CURRENT: Context);
pub(crate) struct Context {
    /// Thread id (not the kernel thread id, but a generated unique number)
#[cfg(feature = "sync")]
pub(crate) thread_id: usize,
/// Thread unpark handles
#[cfg(feature = "sync")]
pub(crate) unpark_cache:
std::cell::RefCell<fxhash::FxHashMap<usize, crate::driver::UnparkHandle>>,
/// Waker sender cache
#[cfg(feature = "sync")]
pub(crate) waker_sender_cache:
std::cell::RefCell<fxhash::FxHashMap<usize, flume::Sender<std::task::Waker>>>,
/// Owned task set and local run queue
pub(crate) tasks: TaskQueue,
/// Time Handle
pub(crate) time_handle: Option<TimeHandle>,
}
impl Default for Context {
fn default() -> Self {
Self::new()
}
}
impl Context {
pub(crate) fn new_with_time_handle(time_handle: TimeHandle) -> Self {
Self {
time_handle: Some(time_handle),
..Self::new()
}
}
pub(crate) fn new() -> Self {
#[cfg(feature = "sync")]
let thread_id = crate::builder::BUILD_THREAD_ID.with(|id| *id);
Self {
#[cfg(feature = "sync")]
thread_id,
#[cfg(feature = "sync")]
unpark_cache: std::cell::RefCell::new(fxhash::FxHashMap::default()),
#[cfg(feature = "sync")]
waker_sender_cache: std::cell::RefCell::new(fxhash::FxHashMap::default()),
tasks: TaskQueue::default(),
time_handle: None,
}
}
#[allow(unused)]
#[cfg(feature = "sync")]
pub(crate) fn unpark_thread(&self, id: usize) {
use crate::driver::{thread::get_unpark_handle, Unpark};
if let Some(handle) = self.unpark_cache.borrow().get(&id) {
handle.unpark();
return;
}
if let Some(v) = get_unpark_handle(id) {
// Write back to local cache
let w = v.clone();
self.unpark_cache.borrow_mut().insert(id, w);
v.unpark();
return;
}
debug_assert!(false, "thread to unpark has not been registered");
}
#[allow(unused)]
#[cfg(feature = "sync")]
pub(crate) fn send_waker(&self, id: usize, w: std::task::Waker) {
use crate::driver::thread::get_waker_sender;
if let Some(sender) = self.waker_sender_cache.borrow().get(&id) {
let _ = sender.send(w);
return; | let _ = s.send(w);
self.waker_sender_cache.borrow_mut().insert(id, s);
return;
}
debug_assert!(false, "sender has not been registered");
}
}
/// Monoio runtime
pub struct Runtime<D> {
pub(crate) driver: D,
pub(crate) context: Context,
}
impl<D> Runtime<D> {
    /// Run a future to completion on the current thread, driving the io
    /// driver and the local task queue until the future resolves.
pub fn block_on<F>(&mut self, future: F) -> F::Output
where
F: Future,
D: Driver,
{
assert!(
!CURRENT.is_set(),
"Can not start a runtime inside a runtime"
);
let waker = dummy_waker();
let cx = &mut std::task::Context::from_waker(&waker);
self.driver.with(|| {
CURRENT.set(&self.context, || {
#[cfg(feature = "sync")]
let join = unsafe { spawn_without_static(future) };
#[cfg(not(feature = "sync"))]
let join = future;
pin_utils::pin_mut!(join);
set_poll();
loop {
loop {
// Consume all tasks(with max round to prevent io starvation)
let mut max_round = self.context.tasks.len() * 2;
while let Some(t) = self.context.tasks.pop() {
t.run();
if max_round == 0 {
// maybe there's a looping task
break;
} else {
max_round -= 1;
}
}
// Check main future
if should_poll() {
// check if ready
if let std::task::Poll::Ready(t) = join.as_mut().poll(cx) {
return t;
}
}
if self.context.tasks.is_empty() {
// No task to execute, we should wait for io blockingly
// Hot path
break;
}
// Cold path
let _ = self.driver.submit();
}
// Wait and Process CQ
let _ = self.driver.park();
}
})
})
}
}
/// Spawns a new asynchronous task, returning a [`JoinHandle`] for it.
///
/// Spawning a task enables the task to execute concurrently to other tasks.
/// There is no guarantee that a spawned task will execute to completion. When a
/// runtime is shutdown, all outstanding tasks are dropped, regardless of the
/// lifecycle of that task.
///
/// [`JoinHandle`]: monoio::task::JoinHandle
///
/// # Examples
///
/// In this example, a server is started and `spawn` is used to start a new task
/// that processes each received connection.
///
/// ```no_run
/// monoio::start(async {
/// let handle = monoio::spawn(async {
/// println!("hello from a background task");
/// });
///
/// // Let the task complete
/// handle.await;
/// });
/// ```
pub fn spawn<T>(future: T) -> JoinHandle<T::Output>
where
T: Future + 'static,
T::Output: 'static,
{
#[cfg(not(feature = "sync"))]
let (task, join) = new_task(future, LocalScheduler);
#[cfg(feature = "sync")]
let (task, join) = new_task(
crate::utils::thread_id::get_current_thread_id(),
future,
LocalScheduler,
);
CURRENT.with(|ctx| {
ctx.tasks.push(task);
});
join
}
#[cfg(feature = "sync")]
unsafe fn spawn_without_static<T>(future: T) -> JoinHandle<T::Output>
where
T: Future,
{
use crate::task::new_task_holding;
let (task, join) = new_task_holding(
crate::utils::thread_id::get_current_thread_id(),
future,
LocalScheduler,
);
CURRENT.with(|ctx| {
ctx.tasks.push(task);
});
join
} | }
if let Some(s) = get_waker_sender(id) {
// Write back to local cache |
contactProfilesCreateOrUpdateSample.js | /*
* Copyright (c) Microsoft Corporation.
* Licensed under the MIT License.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
* Changes may cause incorrect behavior and will be lost if the code is regenerated.
*/
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
const { AzureOrbital } = require("@azure/arm-orbital");
const { DefaultAzureCredential } = require("@azure/identity");
/**
 * This sample demonstrates how to create or update a contact profile
*
* @summary Creates or updates a contact profile
* x-ms-original-file: specification/orbital/resource-manager/Microsoft.Orbital/stable/2022-03-01/examples/ContactProfileCreate.json
*/
async function createAContactProfile() {
const subscriptionId = "subid";
const resourceGroupName = "rg1";
const contactProfileName = "AQUA_DIRECTPLAYBACK_WITH_UPLINK";
const location = "westus";
const credential = new DefaultAzureCredential();
const client = new AzureOrbital(credential, subscriptionId); | const result = await client.contactProfiles.beginCreateOrUpdateAndWait(
resourceGroupName,
contactProfileName,
location
);
console.log(result);
}
createAContactProfile().catch(console.error); | |
storm.go | package storm
import (
"bytes"
"encoding/binary"
"os"
"time"
"github.com/Schinti95/storm/codec"
"github.com/Schinti95/storm/codec/json"
"github.com/Schinti95/storm/index"
"github.com/Schinti95/storm/q"
"github.com/boltdb/bolt"
)
const (
dbinfo = "__storm_db"
metadataBucket = "__storm_metadata"
)
// Defaults to json
var defaultCodec = json.Codec
// Open opens a database at the given path with optional Storm options.
func Open(path string, stormOptions ...func(*DB) error) (*DB, error) {
var err error
s := &DB{
Path: path,
codec: defaultCodec,
}
for _, option := range stormOptions {
if err = option(s); err != nil {
return nil, err
}
}
if s.boltMode == 0 {
s.boltMode = 0600
}
if s.boltOptions == nil {
s.boltOptions = &bolt.Options{Timeout: 1 * time.Second}
}
s.root = &node{s: s, rootBucket: s.rootBucket, codec: s.codec, batchMode: s.batchMode}
// skip if UseDB option is used
if s.Bolt == nil {
s.Bolt, err = bolt.Open(path, s.boltMode, s.boltOptions)
if err != nil {
return nil, err
}
err = s.checkVersion()
if err != nil {
return nil, err
}
}
return s, nil
}
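// A minimal usage sketch, assuming a bbolt-backed file on disk; the User
// struct and its storm tags are illustrative only, not defined in this package:
//
//	db, err := Open("my.db")
//	if err != nil {
//		log.Fatal(err)
//	}
//	defer db.Close()
//
//	type User struct {
//		ID   int `storm:"id,increment"`
//		Name string
//	}
//	if err := db.Save(&User{Name: "alice"}); err != nil {
//		log.Fatal(err)
//	}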
// DB is the wrapper around BoltDB. It contains an instance of BoltDB and uses it to perform all the
// needed operations
type DB struct {
// Path of the database file
Path string
// Handles encoding and decoding of objects
codec codec.MarshalUnmarshaler
// Bolt is still easily accessible
Bolt *bolt.DB
// Bolt file mode
boltMode os.FileMode
// Bolt options
boltOptions *bolt.Options
// Enable auto increment on empty integer fields
autoIncrement bool
// The root node that points to the root bucket.
root *node
// The root bucket name
rootBucket []string
// Enable batch mode for read-write transaction, instead of update mode
batchMode bool
}
// From returns a new Storm node with a new bucket root.
// All DB operations on the new node will be executed relative to the given
// bucket.
func (s *DB) From(root ...string) Node {
newNode := *s.root
newNode.rootBucket = root
return &newNode
}
// WithTransaction returns a New Storm node that will use the given transaction.
func (s *DB) WithTransaction(tx *bolt.Tx) Node {
return s.root.WithTransaction(tx)
}
// Bucket returns the root bucket name as a slice.
// In the normal, simple case this will be empty.
func (s *DB) Bucket() []string {
return s.root.Bucket()
}
// Close the database
func (s *DB) Close() error {
return s.Bolt.Close()
}
// Codec returns the EncodeDecoder used by this instance of Storm
func (s *DB) Codec() codec.MarshalUnmarshaler {
return s.codec
}
// WithCodec returns a New Storm Node that will use the given Codec.
func (s *DB) WithCodec(codec codec.MarshalUnmarshaler) Node {
n := s.From().(*node)
n.codec = codec
return n
}
// WithBatch returns a new Storm Node with the batch mode enabled.
func (s *DB) WithBatch(enabled bool) Node {
n := s.From().(*node)
n.batchMode = enabled
return n
}
// Get a value from a bucket
func (s *DB) Get(bucketName string, key interface{}, to interface{}) error {
return s.root.Get(bucketName, key, to)
}
// Set a key/value pair into a bucket
func (s *DB) Set(bucketName string, key interface{}, value interface{}) error {
return s.root.Set(bucketName, key, value)
}
// Delete deletes a key from a bucket
func (s *DB) Delete(bucketName string, key interface{}) error {
return s.root.Delete(bucketName, key)
}
// GetBytes gets a raw value from a bucket.
func (s *DB) GetBytes(bucketName string, key interface{}) ([]byte, error) {
return s.root.GetBytes(bucketName, key)
}
// SetBytes sets a raw value into a bucket.
func (s *DB) SetBytes(bucketName string, key interface{}, value []byte) error {
return s.root.SetBytes(bucketName, key, value)
}
// Save a structure
func (s *DB) Save(data interface{}) error {
return s.root.Save(data)
}
// PrefixScan scans the root buckets for keys matching the given prefix.
func (s *DB) PrefixScan(prefix string) []Node {
return s.root.PrefixScan(prefix)
}
// RangeScan scans the root buckets over a range such as a sortable time range.
func (s *DB) RangeScan(min, max string) []Node {
return s.root.RangeScan(min, max)
}
// Select a list of records that match a list of matchers. Doesn't use indexes.
func (s *DB) Select(matchers ...q.Matcher) Query {
return s.root.Select(matchers...)
}
// Range returns one or more records by the specified index within the specified range
func (s *DB) Range(fieldName string, min, max, to interface{}, options ...func(*index.Options)) error {
return s.root.Range(fieldName, min, max, to, options...)
}
// AllByIndex gets all the records of a bucket that are indexed in the specified index
func (s *DB) AllByIndex(fieldName string, to interface{}, options ...func(*index.Options)) error {
return s.root.AllByIndex(fieldName, to, options...)
}
// All get all the records of a bucket
func (s *DB) All(to interface{}, options ...func(*index.Options)) error {
return s.root.All(to, options...)
}
// Count counts all the records of a bucket
func (s *DB) Count(data interface{}) (int, error) {
return s.root.Count(data)
}
// DeleteStruct deletes a structure from the associated bucket
func (s *DB) DeleteStruct(data interface{}) error {
return s.root.DeleteStruct(data)
}
// Remove deletes a structure from the associated bucket
// Deprecated: Use DeleteStruct instead.
func (s *DB) Remove(data interface{}) error {
return s.root.DeleteStruct(data)
}
// Drop a bucket
func (s *DB) Drop(data interface{}) error {
return s.root.Drop(data)
}
// Find returns one or more records by the specified index
func (s *DB) Find(fieldName string, value interface{}, to interface{}, options ...func(q *index.Options)) error {
return s.root.Find(fieldName, value, to, options...)
}
// Init creates the indexes and buckets for a given structure
func (s *DB) Init(data interface{}) error {
return s.root.Init(data)
}
// ReIndex rebuilds all the indexes of a bucket
func (s *DB) ReIndex(data interface{}) error {
return s.root.ReIndex(data)
}
// One returns one record by the specified index
func (s *DB) One(fieldName string, value interface{}, to interface{}) error {
return s.root.One(fieldName, value, to)
}
// Begin starts a new transaction.
func (s *DB) Begin(writable bool) (Node, error) {
return s.root.Begin(writable)
}
// Rollback closes the transaction and ignores all previous updates.
func (s *DB) Rollback() error {
return s.root.Rollback()
}
// Commit writes all changes to disk.
func (s *DB) Commit() error {
	return s.root.Commit()
}
// Update a structure
func (s *DB) Update(data interface{}) error {
return s.root.Update(data)
}
// UpdateField updates a single field
func (s *DB) UpdateField(data interface{}, fieldName string, value interface{}) error {
return s.root.UpdateField(data, fieldName, value)
}
// CreateBucketIfNotExists creates the bucket below the current node if it doesn't
// already exist.
func (s *DB) CreateBucketIfNotExists(tx *bolt.Tx, bucket string) (*bolt.Bucket, error) {
return s.root.CreateBucketIfNotExists(tx, bucket)
}
// GetBucket returns the given bucket below the current node.
func (s *DB) GetBucket(tx *bolt.Tx, children ...string) *bolt.Bucket {
return s.root.GetBucket(tx, children...)
}
func (s *DB) checkVersion() error {
var v string
err := s.Get(dbinfo, "version", &v)
if err != nil && err != ErrNotFound {
return err
}
	// for now, we only set the current version if it doesn't exist or is an old one (v0.5.0 / v0.6.0)
if v == "" || v == "0.5.0" || v == "0.6.0" {
return s.Set(dbinfo, "version", Version)
}
return nil
}
// toBytes turns an interface into a slice of bytes
func toBytes(key interface{}, codec codec.MarshalUnmarshaler) ([]byte, error) |
func numbertob(v interface{}) ([]byte, error) {
var buf bytes.Buffer
err := binary.Write(&buf, binary.BigEndian, v)
if err != nil {
return nil, err
}
return buf.Bytes(), nil
}
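// Big-endian encoding makes the byte slices sort in numeric order under
// bytes.Compare: numbertob(int64(1)) yields [0 0 0 0 0 0 0 1], which sorts
// before the encoding of 2, so byte-wise range scans over integer keys work.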
func numberfromb(raw []byte) (int64, error) {
r := bytes.NewReader(raw)
var to int64
err := binary.Read(r, binary.BigEndian, &to)
if err != nil {
return 0, err
}
return to, nil
}
| {
if key == nil {
return nil, nil
}
switch t := key.(type) {
case []byte:
return t, nil
case string:
return []byte(t), nil
case int:
return numbertob(int64(t))
case uint:
return numbertob(uint64(t))
case int8, int16, int32, int64, uint8, uint16, uint32, uint64:
return numbertob(t)
default:
return codec.Marshal(key)
}
} |
sessions.rs | use crate::messages::system::PacketIn;
use crate::messages::system::PacketsOut;
use crate::messages::system::SessionCommand;
use crate::messages::system::SessionCommandAction;
use crate::messages::StanzaEnvelope;
use crate::router::Router;
use crate::sessions::state::StaticSessionState;
use crate::sessions::state::StaticSessionStateBuilder;
use crate::{
messages::{system::RegisterSession, system::UnregisterSession, SessionManagementPacketError, SessionManagementPacketResult, SessionManagementPacketResultBuilder},
packet::{PacketHandler, StanzaHandler},
sessions::manager::SessionManager,
sessions::state::SessionState,
};
use actix::prelude::*;
use jid::{BareJid, FullJid, Jid};
use log::{error, trace};
use std::convert::TryInto;
use std::str::FromStr;
use uuid::Uuid;
use xmpp_proto::{ns, Bind, CloseStream, Features, FromXmlElement, GenericIq, IqType, NonStanza, OpenStream, Packet, Stanza, StreamError, StreamErrorKind, StreamFeatures};
use xmpp_xml::Element;
pub(crate) mod manager;
pub(crate) mod state;
pub(crate) mod unauthenticated;
const ACTIVE_SESSION_STATES: &[SessionState] = &[SessionState::Binding, SessionState::Binded];
/// Hold a session on a node
pub struct Session {
pub(crate) state: SessionState,
pub(crate) sink: Recipient<PacketsOut>,
jid: Jid,
self_addr: Addr<Self>,
}
impl Session {
pub(crate) fn new(state: StaticSessionState, self_addr: Addr<Self>, sink: Recipient<PacketsOut>) -> Self {
Self {
state: SessionState::Opening,
sink,
jid: state.jid.unwrap(),
self_addr,
}
}
pub(crate) fn sync_state(&mut self, StaticSessionState { state, jid, .. }: &StaticSessionState) {
self.state = *state;
if let Some(jid) = jid {
self.jid = jid.clone();
}
}
}
impl TryInto<StaticSessionState> for &mut Session {
type Error = ();
fn try_into(self) -> Result<StaticSessionState, Self::Error> {
let addr = self.self_addr.clone().recipient::<SessionCommand>();
StaticSessionState::builder().state(self.state).jid(self.jid.clone()).addr_session_command(addr).build().map_err(|_| ())
}
}
impl Actor for Session {
type Context = Context<Self>;
fn started(&mut self, _ctx: &mut Self::Context) {
trace!("Starting Session");
}
fn stopping(&mut self, _ctx: &mut Self::Context) -> actix::Running {
trace!("Stopping Session");
let _ = SessionManager::from_registry().try_send(UnregisterSession { jid: self.jid.clone() });
actix::Running::Stop
}
fn stopped(&mut self, _ctx: &mut Self::Context) {
trace!("Session Stopped");
}
}
impl Handler<SessionCommand> for Session {
type Result = Result<(), ()>;
fn handle(&mut self, msg: SessionCommand, ctx: &mut Self::Context) -> Self::Result {
match msg.0 {
SessionCommandAction::SendPacket(packet) => {
let _ = self.sink.try_send(PacketsOut(vec![packet]));
Ok(())
}
SessionCommandAction::Kill => {
if let Ok(result) = Self::close(&mut SessionManagementPacketResultBuilder::default()) {
self.sync_state(&result.session_state);
let _ = self.sink.try_send(PacketsOut(result.packets));
ctx.stop();
}
Ok(())
}
}
}
}
impl Handler<PacketIn> for Session {
type Result = ResponseActFuture<Self, ()>;
    /// Different packet kinds are handled as follows:
    /// IQ(type=set, session_based_action) -> handled by the session itself
    /// IQ(...) -> routed to the router
    /// Presence(self) -> handled by the session itself
    /// Presence(...) -> routed to the router
    /// _ -> routed to the router
fn handle(&mut self, msg: PacketIn, _ctx: &mut Self::Context) -> Self::Result {
let state = self.try_into().unwrap();
let fut = async move {
trace!("Handle packet in session");
Self::handle_packet(state, &msg.0).await
};
Box::pin(fut.into_actor(self).map(|res, act, _ctx| {
if let Ok(result) = res {
act.sync_state(&result.session_state);
// TODO: Handle better
let _ = act.sink.try_send(PacketsOut(result.packets));
}
}))
}
}
#[async_trait::async_trait]
impl PacketHandler for Session {
type Result = Result<SessionManagementPacketResult, SessionManagementPacketError>;
async fn handle_packet(state: StaticSessionState, stanza: &Packet) -> Self::Result {
match stanza {
Packet::NonStanza(stanza) => <Self as StanzaHandler<_>>::handle(state, &**stanza).await,
Packet::Stanza(stanza) if ACTIVE_SESSION_STATES.contains(&state.state) => <Self as StanzaHandler<_>>::handle(state, &**stanza).await,
Packet::InvalidPacket(invalid_packet) => {
let mut response = SessionManagementPacketResultBuilder::default();
Self::handle_invalid_packet(state, invalid_packet, &mut response)
}
_ => Err(SessionManagementPacketError::Unknown),
}
}
}
#[async_trait::async_trait]
impl StanzaHandler<Stanza> for Session {
async fn handle(state: StaticSessionState, stanza: &Stanza) -> Result<SessionManagementPacketResult, SessionManagementPacketError> {
let fut = match stanza {
Stanza::IQ(stanza) if state.state == SessionState::Binding => <Self as StanzaHandler<_>>::handle(state, stanza),
stanza if state.state == SessionState::Binded => {
                let _ = Router::from_registry().send(StanzaEnvelope { stanza: stanza.clone(), from: state }).await;
Box::pin(async { Err(SessionManagementPacketError::Unknown) })
}
_ => Box::pin(async { Err(SessionManagementPacketError::Unknown) }),
};
fut.await
}
}
#[async_trait::async_trait]
impl StanzaHandler<NonStanza> for Session {
async fn handle(state: StaticSessionState, stanza: &NonStanza) -> Result<SessionManagementPacketResult, SessionManagementPacketError> {
let fut = match stanza {
NonStanza::OpenStream(stanza) => <Self as StanzaHandler<_>>::handle(state, stanza),
// NonStanza::StartTls(stanza) => Self::handle(state, stanza),
// NonStanza::Auth(stanza) => Self::handle(state, stanza),
// NonStanza::StreamError(stanza) => Self::handle(state, stanza),
NonStanza::CloseStream(stanza) => <Self as StanzaHandler<_>>::handle(state, stanza),
_ => Box::pin(async { Err(SessionManagementPacketError::Unknown) }),
};
fut.await
}
}
#[async_trait::async_trait]
impl StanzaHandler<CloseStream> for Session {
async fn handle(_state: StaticSessionState, _stanza: &CloseStream) -> Result<SessionManagementPacketResult, SessionManagementPacketError> {
Ok(SessionManagementPacketResultBuilder::default()
.session_state(SessionState::Closing)
.packet(CloseStream {}.into())
.build()?)
}
}
#[async_trait::async_trait]
impl StanzaHandler<OpenStream> for Session {
async fn handle(state: StaticSessionState, stanza: &OpenStream) -> Result<SessionManagementPacketResult, SessionManagementPacketError> {
let mut response = SessionManagementPacketResultBuilder::default();
response
.packet(
OpenStream {
id: Some(Uuid::new_v4().to_string()),
to: stanza.from.as_ref().map(|jid| BareJid::from(jid.clone()).into()),
// TODO: Replace JID crate with another?
// TODO: Validate FQDN
from: Jid::from_str("localhost").ok(),
// TODO: Validate lang input
lang: "en".into(),
version: "1.0".to_string(),
}
.into(),
)
.session_state(state.state);
if stanza.version != "1.0" {
return Ok(response
.packet(
StreamError {
kind: StreamErrorKind::UnsupportedVersion,
}
.into(),
)
.packet(CloseStream {}.into())
.session_state(SessionState::Closing)
.build()?);
}
if stanza.to.as_ref().map(|t| t.to_string()) != Some("localhost".into()) {
return Ok(response
.packet(StreamError { kind: StreamErrorKind::HostUnknown }.into())
.packet(CloseStream {}.into())
.session_state(SessionState::Closing)
.build()?);
}
match state.state {
SessionState::Opening => {
response
.packet(StreamFeatures { features: Features::Bind }.into())
.session_state(StaticSessionStateBuilder::default().state(SessionState::Binding).build().unwrap());
}
state => {
error!("Action({:?}) at this stage isn't possible", state);
return Self::not_authorized_and_close(&mut response);
}
}
Ok(response.build()?)
}
}
#[async_trait::async_trait]
impl StanzaHandler<GenericIq> for Session {
async fn handle(state: StaticSessionState, stanza: &GenericIq) -> Result<SessionManagementPacketResult, SessionManagementPacketError> |
}
| {
let mut response = SessionManagementPacketResultBuilder::default();
response.session_state(state.state);
if stanza.get_type() == IqType::Set {
match state.state {
SessionState::Binding => {
// We expect a binding command here
match stanza.get_element() {
Some(element) => {
match element.find((ns::BIND, "bind")) {
Some(bind_element) => {
                                        let bind = Bind::from_element(bind_element).unwrap();
                                        let old_jid = state.jid.clone().unwrap();
                                        let jid = FullJid::new(old_jid.clone().node().unwrap(), old_jid.domain(), bind.resource.unwrap());
match SessionManager::from_registry()
.send(RegisterSession {
jid: Jid::Full(jid.clone()),
referer: state.get_addr().unwrap(),
})
.await
.unwrap()
{
Ok(_) => {
let mut result_element = Element::new_with_namespaces((ns::STREAM, "iq"), element);
result_element
.set_attr("id", stanza.get_id())
.set_attr("type", "result")
.append_new_child((ns::BIND, "bind"))
.append_new_child((ns::BIND, "jid"))
.set_text(jid.to_string());
let result = GenericIq::from_element(&result_element).unwrap();
// its bind
response
.session_state(state.clone().set_state(SessionState::Binded).set_jid(Jid::Full(jid.clone())))
.packet(result.into());
}
Err(_) => {
trace!("Error binding session");
return Err(SessionManagementPacketError::Unknown);
}
}
}
None => {
trace!("Something failed in Binding");
return Err(SessionManagementPacketError::Unknown);
}
}
}
None => {
trace!("IQ without element");
return Err(SessionManagementPacketError::Unknown);
}
}
}
_ => {
// trace!("Unsupported state {:?}", e);
// return Err(SessionManagementPacketError::Unknown);
}
}
}
Ok(response.build()?)
} |
parameterGroup.d.ts | import { ResourceBase } from '../resource';
import { Value } from '../dataTypes'; | Description: Value<string>;
Properties?: {
[key: string]: Value<string>;
};
}
export default class ParameterGroup extends ResourceBase {
constructor(properties?: ParameterGroupProperties);
} | export interface ParameterGroupProperties {
CacheParameterGroupFamily: Value<string>; |
index.tsx | /**
* @file component/Suggestion/index.tsx
* @author robzizo < [email protected] >
 * @description Represents the suggestion component for selecting an item from suggestions returned by an API call.
* Documented by: robzizo
* Date of documentation: 2017-07-23
* Reviewed by: -
* Date of review: -
*/
import * as React from 'react';
import {debounce} from 'lodash';
// import {findIndex} from 'lodash/array';
import {Input, Button, message} from 'antd';
import {PlaceChips, UserChips, IcoN} from 'components';
import {LabelChips} from 'components/Chips/label';
import {IChipsItem} from 'components/Chips';
import {IPlace} from 'api/interfaces';
import SystemApi from 'api/system/';
import SearchApi from 'api/search';
import LabelApi from 'api/label';
import FileUtil from 'services/utils/file';
import CONFIG from '../../config';
const style = require('./suggestion.css');
const unknownPicture = require('assets/icons/absents_place.svg');
interface ISuggestProps { | mode?: string;
placeholder?: string;
editable?: boolean;
}
interface ISuggestState {
suggests?: any[];
selectedItems?: any[];
activeItem?: IChipsItem;
input: string;
placeholder?: string;
editable?: boolean;
}
/**
*
* @class Suggestion
* @classdesc get suggestions and handle selecting them and notify parent
* @extends {React.Component<ISuggestProps, ISuggestState>}
*/
class Suggestion extends React.Component<ISuggestProps, ISuggestState> {
/**
* @prop systemConstApi
* @desc An instance of base Api
* @private
* @memberof Suggestion
*/
private systemConstApi;
private mode: string = 'place';
private targetLimit: number;
/**
* Define the `searchApi`
* @private
*/
private searchApi: SearchApi;
private LabelApi: LabelApi;
/**
* Define the `debouncedFillSuggests` to get suggestions appropriated to the query
* @private
*/
private debouncedFillSuggestsCompose: (val: string) => void;
private debouncedFillSuggestsUsers: (val: string) => void;
/**
* @constructor
* Creates an instance of Suggestion.
* @param {ISuggestProps} props
* @memberof Suggestion
*/
constructor(props: any) {
super(props);
/**
* Initial state object
* @default
* @type {object}
* @property {array} suggests - list of suggested items from server
* @property {array} selectedItems - selected items from suggests
   * @property {object} activeItem - the item that is focused and selected
   * @property {string} input - input value ( model )
*/
this.state = {
suggests: [],
selectedItems: props.selectedItems || [],
placeholder: props.placeholder,
activeItem: null,
input: null,
editable: props.editable === false ? false : true,
};
if (props.mode) {
this.mode = props.mode;
}
// assign debouncedFillSuggests
    // Debouncing prevents a burst of calls and wasted resources while the user types
this.debouncedFillSuggestsCompose = debounce(this.fillComposeSuggests, 372);
this.debouncedFillSuggestsUsers = debounce(this.fillUserSuggests, 372);
// assign searchApi
this.LabelApi = new LabelApi();
this.searchApi = new SearchApi();
this.systemConstApi = new SystemApi();
}
public load(items: IChipsItem[]) {
this.setState({
selectedItems: items,
});
}
public componentWillReceiveProps(nProps) {
this.setState({
editable: nProps.editable,
placeholder: nProps.placeholder,
});
}
public componentWillMount() {
if (this.mode === 'place') {
this.initPlace();
}
}
private initPlace = () => {
this.systemConstApi.get().then((result) => {
this.targetLimit = result.post_max_targets || 10;
});
}
/**
* clears the suggested items array
* @function clearSuggests
* @memberof Suggestion
*/
public clearSuggests = () => {
this.setState({
suggests: [],
});
}
/**
* this function calls after any change in component input and
* call the `debouncedFillSuggests` to fill suggests list
* @function handleInputChange
* @private
* @param {any} event
* @memberof Suggestion
*/
private handleInputChange(event) {
this.setState({
input: event.currentTarget.value,
});
if (this.mode === 'place') {
this.debouncedFillSuggestsCompose(event.currentTarget.value);
} else {
this.debouncedFillSuggestsUsers(event.currentTarget.value);
}
}
/**
   * determine which key is pressed and act accordingly
* @example back space in empty input, remove last selected item
* @function keyDownInputVal
* @private
* @param {any} event
* @memberof Suggestion
*/
private keyDownInputVal(event) {
event.persist();
/**
* @const val - input value
* @type string
*/
const val = event.currentTarget.value;
/**
* removes the last selected item whenever the input is empty and
* the backspace key is pressed.
*/
if (event.key === 'Backspace' && val.length === 0) {
const array = this.state.selectedItems;
array.splice(this.state.selectedItems.length - 1, 1);
this.setState({
selectedItems: array,
});
this.props.onSelectedItemsChanged(array);
}
/**
* adds first suggest to selected items on enter key press whenever the
* input is filled
*/
if ((event.key === 'Enter' || event.keyCode === 13) && val.length > 0) {
const array = this.state.suggests;
if (CONFIG().REGEX.EMAIL.test(val)) {
array.push({
_id: val,
name: val,
});
}
this.insertChip(array[0]);
}
if (event.keyCode === 32 || event.keyCode === 188) {
event.preventDefault();
const firstSuggestItem = this.state.suggests[0];
if (firstSuggestItem && firstSuggestItem._id === this.state.input) {
this.insertChip(this.state.suggests[0]);
} else if (this.state.input && this.state.input.length > 1) {
this.insertChip({
_id: this.state.input,
name: null,
picture: null,
});
}
}
}
/**
   * calls `getComposeSuggests` and sets the suggestion items in component state
* @private
* @async
* @param {string} query
* @returns {Promise<any>}
* @memberof Suggestion
*/
private fillComposeSuggests(query: string): Promise<any> {
query = query || '';
return this.getComposeSuggests(query).then((items: IChipsItem[]) => {
if (query.length > 0) {
// const index = findIndex(items, {_id: query});
const index = items.findIndex((s) => s._id === query);
let item: IChipsItem;
if (index === -1) {
item = {
_id: query,
name: query,
};
if (items.length > 1) {
items.splice(4, 0, item);
} else {
items.push(item);
}
}
}
this.setState({
suggests: items,
});
});
}
private fillUserSuggests(query: string): Promise<any> {
query = query || '';
return this.getUserSuggests(query).then((items: IChipsItem[]) => {
this.setState({
suggests: items,
});
});
}
/**
* get suggests items from api call
* @private
* @async
* @param {string} query
* @returns {Promise<any>}
* @memberof Suggestion
*/
private getComposeSuggests(query: string): Promise<any> {
return new Promise((resolve) => {
this.searchApi.searchForCompose({
keyword: query,
limit: 13,
}).then((response) => {
let placesCount: number = 2;
let recipientsCount: number = 1;
const places: IPlace[] = response.places.filter((i) => {
return this.state.selectedItems.findIndex((s) => s._id === i._id) === -1;
});
const recipients: string[] = response.recipients.filter((i) => {
return this.state.selectedItems.findIndex((s) => s._id === i) === -1;
});
        // suggest at most 3 items in total,
        // reserving the third slot for a recipient when any exist
if (recipients.length === 0) {
recipientsCount = 0;
placesCount = 3;
}
const items: IChipsItem[] = places.slice(0, placesCount).map((i) => {
const item: IChipsItem = {
_id: i._id,
name: i.name,
picture: i.picture,
};
return item;
}).concat(recipients.slice(0, recipientsCount).map((i) => {
const item: IChipsItem = {
_id: i,
name: null,
picture: null,
};
return item;
}));
resolve(items);
}, () => {
message.error('Could not suggest any recipients right now');
});
});
}
private getUserSuggests(query: string): Promise<any> {
return new Promise((resolve) => {
const apiResponse = (result) => {
const users = result.filter((i) => {
return this.state.selectedItems.findIndex((s) => s._id === i._id) === -1;
});
const items = users.slice(0, 3);
resolve(items);
};
const apiError = () => {
message.error('Could not suggest any user right now');
};
if (this.mode === 'user') {
this.searchApi.searchForUsers({
keyword: query,
limit: 3,
}).then(apiResponse, apiError);
} else {
this.LabelApi.search({
skip: 0,
limit: 3,
filter: 'all',
keyword: query,
details: true,
}).then(apiResponse, apiError);
}
});
}
/**
   * wraps the matched search query within the suggested item text in <b> tags
* @private
* @param {string} text
* @param {string} keyword
* @returns {string}
* @memberof Suggestion
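   * @example
   * // mark("Hello World", "wor") returns 'Hello <b>Wor</b>ld'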
*/
private mark = (text: string, keyword: string): string => {
if (!keyword) {
return text;
}
if (!text) {
return text;
}
const index = text.toLowerCase().indexOf(keyword.toLowerCase());
if (index === -1) {
return text;
}
    // use the case-insensitive match position found above, so e.g. "Wor" is
    // wrapped even when the typed query is "wor"
    return `${text.slice(0, index)}<b>${text.slice(index, index + keyword.length)}</b>${text.slice(index + keyword.length)}`;
}
/**
* insert the chips to selected items and updates state also notifies parent
* @private
* @param {IChipsItem} item
* @memberof Suggestion
*/
private insertChip = (item: IChipsItem) => {
    // prevent exceeding the maximum number of compose recipients.
// TODO notify user
if (this.state.selectedItems.length === this.targetLimit) {
return;
}
/**
     * prevent adding the same item more than once
*/
if (this.state.selectedItems.findIndex((i) => i._id === item._id) > -1) {
return;
}
const items = [...this.state.selectedItems, item];
this.props.onSelectedItemsChanged(items);
this.setState({
selectedItems: items,
input: null,
suggests: [],
});
}
/**
* register handler for input focus in component
* removes the active state of all chips and calls `fillSuggests`
* @private
* @memberof Suggestion
*/
private handleInputFocus = () => {
this.setState({
activeItem: null,
});
if (this.mode === 'place') {
this.fillComposeSuggests(this.state.input);
} else if (this.mode === 'label') {
this.fillUserSuggests(this.state.input);
} else {
this.fillUserSuggests(this.state.input);
}
}
/**
* register handler for clicking on chips
   * sets the active state on the clicked chips item and makes the others inactive
* @private
* @memberof Suggestion
*/
private handleChipsClick = (item: IChipsItem) => {
this.setState({
activeItem: item,
suggests: [],
});
}
/**
* remove active item from selected items
* @private
* @memberof Suggestion
*/
private removeItem = () => {
const index = this.state.selectedItems.indexOf(this.state.activeItem);
const items = [...this.state.selectedItems.slice(0, index), ...this.state.selectedItems.slice(index + 1)];
this.props.onSelectedItemsChanged(items);
this.setState({
selectedItems: items,
activeItem: null,
});
}
/**
* get picture of item
* @returns {string}
* @private
* @memberof Suggestion
*/
private getPicture = (item: IChipsItem) => {
return item.picture && item.picture.x64
? FileUtil.getViewUrl(item.picture.x64)
: unknownPicture;
}
/**
* @function render
* @description Renders the component
* @returns {ReactElement} markup
* @memberof Suggestion
* @lends Suggestion
*/
public render() {
    // temp functions that bind `this` and keep the TSX typings happy
/**
* @const tempFunctionChange binds `this` to function
* @type function
*/
const tempFunctionChange = this.handleInputChange.bind(this);
/**
* @const tempFunctionKeydown binds `this` to function
* @type function
*/
const tempFunctionKeydown = this.keyDownInputVal.bind(this);
/**
* @const listItems - render Jsx elements of suggestions
* @type Tsx element
*/
const listItems = this.state.suggests.map((item) => {
let name = item.name || item.title || item.fullName || item._id;
if (item.fname) {
name = item.fname + ' ' + item.lname;
}
return (
<li key={item._id}
onClick={this.insertChip.bind(this, item)}>
{this.props.mode !== 'label' && <img src={this.getPicture(item)} alt=""/>}
{this.props.mode === 'label' && (
<div className={style[item.code]}>
<IcoN size={24} name={'tag24'}/>
</div>
)}
<div>
<p dangerouslySetInnerHTML={{__html: this.mark(name, this.state.input)}}/>
</div>
</li>
);
});
let recipients;
/**
* @const recipients - render Jsx elements of selected items
* @type Tsx element
*/
switch (this.mode) {
default:
case 'place':
recipients = this.state.selectedItems.map((item) => {
return (
<PlaceChips key={item._id} active={this.state.activeItem && item._id === this.state.activeItem._id}
onChipsClick={this.handleChipsClick} item={item}/>
);
});
break;
case 'user':
recipients = this.state.selectedItems.map((item) => {
return (
<UserChips key={item._id} active={this.state.activeItem && item._id === this.state.activeItem._id}
onChipsClick={this.handleChipsClick} user={item} editable={this.state.editable}/>
);
});
break;
case 'label':
recipients = this.state.selectedItems.map((item) => {
return (
<LabelChips key={item._id} active={this.state.activeItem && item._id === this.state.activeItem._id}
onChipsClick={this.handleChipsClick} label={item} editable={this.state.editable}/>
);
});
break;
}
return (
<div className={style.suggestionWrpper}>
<div className={style.input}>
{this.mode === 'place' && (
<span>
With:
</span>
)}
{/* selected Items */}
{recipients}
{this.state.editable && (
<Input
onChange={tempFunctionChange}
onKeyDown={tempFunctionKeydown}
onFocus={this.handleInputFocus}
value={this.state.input}
placeholder={this.props.placeholder}
/>
)}
</div>
{/* suggestion Items */}
{
this.state.suggests.length > 0 &&
(
<ul className={style.suggests}>
{listItems}
</ul>
)
}
{/* element for actions on active item */}
{this.state.activeItem &&
(
<div className={style.selectItemControl}>
<span>{this.state.activeItem._id}</span>
<Button size="large" onClick={this.removeItem.bind(this, '')}>Remove</Button>
</div>
)}
</div>
);
}
}
export {Suggestion} | selectedItems?: any[];
onSelectedItemsChanged: (items: any[]) => void; |
main_test.go | package main
import (
"testing"
"github.com/quasilyte/parsing-in-go/phpdoc"
"github.com/quasilyte/parsing-in-go/phpdoctest"
)
type parserWrapper struct {
parser *TypeParser
}
func (p *parserWrapper) Parse(s string) (phpdoc.Type, error) {
typ := p.parser.Parse(s)
var conv converter
return conv.Convert(typ.Expr), nil
}
func TestMain(t *testing.T) {
parser := &parserWrapper{
parser: NewTypeParser(),
}
phpdoctest.Run(t, parser)
}
func BenchmarkParser(b *testing.B) {
parser := &parserWrapper{
parser: NewTypeParser(), | }
phpdoctest.RunBenchmark(b, parser)
} | |
main.rs | use std::convert::TryInto;
/// Gets all divisors of a number, including itself
fn get_divisors(n: u32) -> Vec<u32> {
let mut results = Vec::new();
for i in 1..(n / 2 + 1) {
if n % i == 0 {
results.push(i);
}
}
results.push(n);
results
}
/// Determines whether some subset of `divisors` sums exactly to `x`
/// (used to test whether the half-sum partition exists)
fn is_summable(x: i32, divisors: &[u32]) -> bool {
if !divisors.is_empty() {
if divisors.contains(&(x as u32)) {
return true;
} else if let Some((first, t)) = divisors.split_first() {
return is_summable(x - *first as i32, t) || is_summable(x, t);
}
}
false
}
/// Calculates whether the number is a Zumkeller number
/// Zumkeller numbers are the set of numbers whose divisors can be partitioned
/// into two disjoint sets that sum to the same value. Each sum must contain
/// divisor values that are not in the other sum, and all of the divisors must
/// be in one or the other.
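/// For example, 20 has divisors {1, 2, 4, 5, 10, 20}, which sum to 42;
/// {1, 20} and {2, 4, 5, 10} each sum to 21, so 20 is a Zumkeller number.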
fn is_zumkeller_number(number: u32) -> bool {
if number % 18 == 6 || number % 18 == 12 {
return true;
}
let div = get_divisors(number);
let divisor_sum: u32 = div.iter().sum();
if divisor_sum == 0 {
return false;
}
if divisor_sum % 2 == 1 {
return false;
}
    // odd numbers with positive, even abundance are Zumkeller numbers
let abundance = divisor_sum as i32 - 2 * number as i32;
if number % 2 == 1 && abundance > 0 && abundance % 2 == 0 {
return true;
}
let half = divisor_sum / 2;
return div.contains(&half)
|| (div.iter().filter(|&&d| d < half).count() > 0
&& is_summable(half.try_into().unwrap(), &div));
}
fn main() {
println!("\nFirst 220 Zumkeller numbers:");
let mut counter: u32 = 0;
let mut i: u32 = 0;
while counter < 220 {
if is_zumkeller_number(i) {
print!("{:>3}", i);
counter += 1;
print!("{}", if counter % 20 == 0 { "\n" } else { "," });
}
i += 1;
}
println!("\nFirst 40 odd Zumkeller numbers:");
let mut counter: u32 = 0;
let mut i: u32 = 3;
while counter < 40 {
if is_zumkeller_number(i) {
print!("{:>5}", i);
counter += 1;
print!("{}", if counter % 20 == 0 { "\n" } else { "," });
}
i += 2;
}
}
#[cfg(test)]
mod tests { |
#[test]
fn test_is_zumkeller() {
assert_eq!(is_zumkeller_number(0), false);
assert_eq!(is_zumkeller_number(6), true);
assert_eq!(is_zumkeller_number(20), true);
assert_eq!(is_zumkeller_number(21), false);
assert_eq!(is_zumkeller_number(198), true);
}
} | use super::is_zumkeller_number; |
report.go | package main
import (
"os"
"path/filepath"
"sort"
"strconv"
"strings"
"github.com/nokia/ntt/internal/fs"
"github.com/nokia/ntt/internal/ntt"
"github.com/nokia/ntt/k3"
"github.com/nokia/ntt/project"
)
type Report struct {
Args []string `json:"args"`
Name string `json:"name"`
Timeout float64 `json:"timeout"`
ParametersDir string `json:"parameters_dir"`
ParametersFile string `json:"parameters_file"`
TestHook string `json:"test_hook"`
SourceDir string `json:"source_dir"`
DataDir string `json:"datadir"`
SessionID int `json:"session_id"`
Environ []string `json:"env"`
Sources []string `json:"sources"`
Imports []string `json:"imports"`
Files []string `json:"files"`
AuxFiles []string `json:"aux_files"`
OssInfo string `json:"ossinfo"`
K3 struct {
Compiler string `json:"compiler"`
Runtime string `json:"runtime"`
Builtins []string `json:"builtins"`
} `json:"k3"`
suite *ntt.Suite
err error
}
func (r *Report) Err() string {
if r.err != nil {
return r.err.Error()
}
return ""
}
func NewReport(args []string) *Report {
var err error = nil
r := Report{Args: args}
r.suite, r.err = ntt.NewFromArgs(args...)
if r.err == nil {
r.Name, r.err = r.suite.Name()
}
if r.err == nil {
r.Timeout, r.err = r.suite.Timeout()
}
r.ParametersDir, err = r.suite.ParametersDir()
if (r.err == nil) && (err != nil) {
r.err = err
}
r.ParametersFile, err = path(r.suite.ParametersFile())
if (r.err == nil) && (err != nil) {
r.err = err
}
r.TestHook, err = path(r.suite.TestHook())
if (r.err == nil) && (err != nil) {
r.err = err
}
r.DataDir, err = r.suite.Getenv("NTT_DATADIR")
if (r.err == nil) && (err != nil) {
r.err = err
}
if env, err := r.suite.Getenv("NTT_SESSION_ID"); err == nil {
r.SessionID, err = strconv.Atoi(env)
if (r.err == nil) && (err != nil) {
r.err = err
}
} else {
if r.err == nil {
r.err = err
}
}
r.Environ, err = r.suite.Environ()
if err == nil {
sort.Strings(r.Environ)
}
if (r.err == nil) && (err != nil) {
r.err = err
}
{
paths, err := r.suite.Sources()
r.Sources = paths
if (r.err == nil) && (err != nil) {
r.err = err
}
}
{
paths, err := r.suite.Imports()
r.Imports = paths
if (r.err == nil) && (err != nil) {
r.err = err
}
}
r.Files, err = project.Files(r.suite)
if (r.err == nil) && (err != nil) {
r.err = err
}
if root := r.suite.Root(); root != "" {
r.SourceDir = root
if path, err := filepath.Abs(r.SourceDir); err == nil {
r.SourceDir = path
} else if r.err == nil {
r.err = err
}
}
for _, dir := range k3.FindAuxiliaryDirectories() {
r.AuxFiles = append(r.AuxFiles, fs.FindTTCN3Files(dir)...)
}
r.K3.Compiler = k3.Compiler()
r.K3.Runtime = k3.Runtime()
r.OssInfo, _ = r.suite.Getenv("OSSINFO")
hint := filepath.Dir(r.K3.Runtime)
switch {
// Probably a regular K3 installation. We assume datadir and libdir are
// in a sibling folder.
case strings.HasSuffix(hint, "/bin"):
r.K3.Builtins = collectFolders(
hint+"/../lib*/k3/plugins",
hint+"/../lib*/k3/plugins/ttcn3",
hint+"/../lib/*/k3/plugins",
hint+"/../lib/*/k3/plugins/ttcn3",
hint+"/../share/k3/ttcn3",
)
if r.OssInfo == "" {
r.OssInfo = filepath.Clean(hint + "/../share/k3/asn1")
}
// If the runtime seems to be a buildtree of our source repository, we
// assume the builtins are there as well.
case strings.HasSuffix(hint, "/src/k3r"):
// TODO(5nord) the last glob fails if CMAKE_BUILD_DIR is not
// beneath CMAKE_SOURCE_DIR. Find a way to locate the source
// dir correctly.
srcdir := hint + "/../../.."
r.K3.Builtins = collectFolders(
hint+"/../k3r-*-plugin",
srcdir+"/src/k3r-*-plugin",
srcdir+"/src/ttcn3",
srcdir+"/src/libzmq",
)
}
return &r
}
func collectFolders(globs ...string) []string {
return removeDuplicates(filterFolders(evalSymlinks(resolveGlobs(globs))))
}
func resolveGlobs(globs []string) []string {
var ret []string
for _, g := range globs {
if matches, err := filepath.Glob(g); err == nil {
ret = append(ret, matches...)
}
}
return ret
}
func evalSymlinks(links []string) []string {
var ret []string
for _, l := range links {
if path, err := filepath.EvalSymlinks(l); err == nil {
ret = append(ret, path)
}
}
return ret
}
func filterFolders(paths []string) []string {
var ret []string
for _, path := range paths {
info, err := os.Stat(path)
if err != nil {
continue
}
if info.IsDir() {
ret = append(ret, path)
}
}
return ret
}
func removeDuplicates(slice []string) []string {
var ret []string
h := make(map[string]bool)
for _, s := range slice {
if _, v := h[s]; !v {
h[s] = true
ret = append(ret, s)
} | }
func path(f *fs.File, err error) (string, error) {
if f == nil {
return "", err
}
return f.Path(), err
} | }
return ret |
lang.rs | //! Tests auto-converted from "sass-spec/spec/non_conformant/scss/lang.hrx"
#[test]
fn test() | {
assert_eq!(
crate::rsass(
"h1:lang(as),h1:lang(bn),h1:lang(gu),h1:lang(hi),h1:lang(kn),h1:lang(ml),h1:lang(mr),h1:lang(or),h1:lang(pa),h1:lang(sa),h1:lang(ta),h1:lang(te) {\
\n line-height:1.5em !important\
\n}\
\nh2:lang(as),h3:lang(as),h4:lang(as),h5:lang(as),h6:lang(as),h2:lang(bn),h3:lang(bn),h4:lang(bn),h5:lang(bn),h6:lang(bn),h2:lang(gu),h3:lang(gu),h4:lang(gu),h5:lang(gu),h6:lang(gu),h2:lang(hi),h3:lang(hi),h4:lang(hi),h5:lang(hi),h6:lang(hi),h2:lang(kn),h3:lang(kn),h4:lang(kn),h5:lang(kn),h6:lang(kn),h2:lang(ml),h3:lang(ml),h4:lang(ml),h5:lang(ml),h6:lang(ml),h2:lang(mr),h3:lang(mr),h4:lang(mr),h5:lang(mr),h6:lang(mr),h2:lang(or),h3:lang(or),h4:lang(or),h5:lang(or),h6:lang(or),h2:lang(pa),h3:lang(pa),h4:lang(pa),h5:lang(pa),h6:lang(pa),h2:lang(sa),h3:lang(sa),h4:lang(sa),h5:lang(sa),h6:lang(sa),h2:lang(ta),h3:lang(ta),h4:lang(ta),h5:lang(ta),h6:lang(ta),h2:lang(te),h3:lang(te),h4:lang(te),h5:lang(te),h6:lang(te)\
\n{\
\n line-height:1.2em\
\n}\
\nol:lang(bcc) li,ol:lang(bqi) li,ol:lang(fa) li,ol:lang(glk) li,ol:lang(kk-arab) li,ol:lang(mzn) li {\
\n list-style-type:-moz-persian;list-style-type:persian\
\n}\
\nol:lang(ckb) li {\
\n list-style-type:-moz-arabic-indic;list-style-type:arabic-indic\
\n}\
\nol:lang(as) li,ol:lang(bn) li{\
\n list-style-type:-moz-bengali;list-style-type:bengali\
\n}\
\nol:lang(or) li {\
\n list-style-type:-moz-oriya;list-style-type:oriya\
\n}"
)
.unwrap(),
"h1:lang(as), h1:lang(bn), h1:lang(gu), h1:lang(hi), h1:lang(kn), h1:lang(ml), h1:lang(mr), h1:lang(or), h1:lang(pa), h1:lang(sa), h1:lang(ta), h1:lang(te) {\
\n line-height: 1.5em !important;\
\n}\
\nh2:lang(as), h3:lang(as), h4:lang(as), h5:lang(as), h6:lang(as), h2:lang(bn), h3:lang(bn), h4:lang(bn), h5:lang(bn), h6:lang(bn), h2:lang(gu), h3:lang(gu), h4:lang(gu), h5:lang(gu), h6:lang(gu), h2:lang(hi), h3:lang(hi), h4:lang(hi), h5:lang(hi), h6:lang(hi), h2:lang(kn), h3:lang(kn), h4:lang(kn), h5:lang(kn), h6:lang(kn), h2:lang(ml), h3:lang(ml), h4:lang(ml), h5:lang(ml), h6:lang(ml), h2:lang(mr), h3:lang(mr), h4:lang(mr), h5:lang(mr), h6:lang(mr), h2:lang(or), h3:lang(or), h4:lang(or), h5:lang(or), h6:lang(or), h2:lang(pa), h3:lang(pa), h4:lang(pa), h5:lang(pa), h6:lang(pa), h2:lang(sa), h3:lang(sa), h4:lang(sa), h5:lang(sa), h6:lang(sa), h2:lang(ta), h3:lang(ta), h4:lang(ta), h5:lang(ta), h6:lang(ta), h2:lang(te), h3:lang(te), h4:lang(te), h5:lang(te), h6:lang(te) {\
\n line-height: 1.2em;\
\n}\
\nol:lang(bcc) li, ol:lang(bqi) li, ol:lang(fa) li, ol:lang(glk) li, ol:lang(kk-arab) li, ol:lang(mzn) li {\
\n list-style-type: -moz-persian;\
\n list-style-type: persian;\
\n}\
\nol:lang(ckb) li {\
\n list-style-type: -moz-arabic-indic;\
\n list-style-type: arabic-indic;\
\n}\
\nol:lang(as) li, ol:lang(bn) li {\
\n list-style-type: -moz-bengali;\
\n list-style-type: bengali;\
\n}\
\nol:lang(or) li {\
\n list-style-type: -moz-oriya;\
\n list-style-type: oriya;\
\n}\
\n"
);
} |
|
mxw.ts | const mxw = (<any>global).mxw; | export { mxw }; |
|
index.js | export { default as Cards } from "./Cards/Cards";
export { default as Chart } from "./Chart/Chart";
export { default as CountryPicker } from "./CountryPicker/CountryPicker" | ||
compare_fit_ac_fc.py | import numpy as np
import pandas as pd
import os
import sys
sys.path.insert(0, "../../../ctg/core")
#from fit_ac_fc import *
from fit_ac_fc2 import Counts
from config import config
abundance_file = os.path.join(config.A549_test, "A549_abundance_thresholds.txt")
counts_file = os.path.join(config.A549_test, "A549_timepoint_counts.txt")
#times = np.array([[3,14, 21, 28], [3,14,21,28]])
times = np.array([3,14,21,28])
#ac, fc, allbad, sdfc, df, p_t, lfdr, names, lmbda, xfit, mask = fit_ac_fc(abundance_file, counts_file, times)
#ac, fc, allbad, sdfc, df, p_t, lfdr = fit_ac_fc(abundance_file, counts_file, times)
#ac, fc, allbad, sdfc, p_t, names = fit_ac_fc(abundance_file, counts_file, times)
filesList = ['a1_ctg.csv',
'a2_ctg.csv',
'allbad_ctg.csv',
'df_ctg.csv',
'fc_ctg.csv',
'lfdr_fc_ctg.csv',
'p_t_ctg.csv',
'sdfc_ctg.csv',
'lambda1_ctg.csv',
'lambda2_ctg.csv',
'xfit1_ctg.csv',
'xfit2_ctg.csv',
'good1_ctg.csv',
'good2_ctg.csv']
# for fn in filesList:
# pd.read_csv(fn)
def compare_a():
a1_df = pd.read_csv(filesList[0])
a2_df = pd.read_csv(filesList[1])
assert (a1_df.iloc[:,0] != a2_df.iloc[:,0]).sum() == 0
ac_ctg = pd.DataFrame([a1_df.iloc[:,1], a2_df.iloc[:,1]],).T
ac_ctg.index = a1_df.iloc[:,0]
assert np.allclose(ac_ctg.as_matrix(), ac.T)
def comapre_fc():
fc_ctg = pd.read_csv(filesList[4], index_col=0)
assert np.allclose(fc_ctg.as_matrix().ravel(), fc)
def compare_allbad():
allbad_ctg = pd.read_csv(filesList[2], index_col=0)
assert (allbad_ctg.as_matrix().ravel() == np.array(allbad)).all()
def compare_sdfc():
sdfc_ctg = pd.read_csv(filesList[7], index_col=0)
# print(sdfc)
# print(sdfc_ctg.as_matrix().ravel())
assert np.allclose(sdfc, sdfc_ctg.as_matrix().ravel())
def compare_df():
df_ctg = pd.read_csv(filesList[3], index_col=0)
assert np.allclose(df_ctg.as_matrix().ravel(), df)
def compare_p_t():
p_t_ctg = pd.read_csv(filesList[6], index_col=0)
assert np.allclose(p_t_ctg.as_matrix().ravel(), p_t)
def compare_lfdr():
lfdr_ctg = pd.read_csv(filesList[5], index_col=0)
print(lfdr_ctg.as_matrix().ravel())
print(lfdr)
print(np.allclose(lfdr_ctg.as_matrix().ravel(), lfdr))
def _compare_lambda():
#Note: lambda is usually not returned. Use for debugging
lmbda1_df = pd.read_csv(filesList[8], index_col=0)
lmbda2_df = pd.read_csv(filesList[9], index_col=0)
lmbda_ctg = pd.DataFrame([lmbda1_df.iloc[:,0], lmbda2_df.iloc[:,0]],).T
assert np.allclose(lmbda_ctg.as_matrix(), lmbda.T)
def _compare_xfit():
|
def _compare_mask():
good1_df = pd.read_csv(filesList[12], index_col=0)
good2_df = pd.read_csv(filesList[13], index_col=0)
assert np.allclose(good1_df.as_matrix(), mask[0])
assert np.allclose(good2_df.as_matrix(), mask[1])
if __name__ == "__main__":
def test_suite():
compare_a()
comapre_fc()
compare_allbad()
compare_sdfc()
#compare_df()
#compare_p_t()
#compare_lfdr()
# _compare_lambda()
# _compare_xfit()
# _compare_mask()
def fit_ac_fc(abundance_file, counts_file, times):
if isinstance(counts_file, str):
c = Counts.from_file(counts_file)
c.fit_ac_fc(pd.read_csv(abundance_file, sep='\t', index_col=0))
else:
c = Counts(counts_file)
c.fit_ac_fc(pd.read_csv(abundance_file, sep='\t', index_col=0))
return c.ac, c.fitness, c.mask.allbad, c.sdfc, c.p_t, c.names
global ac, fc, allbad, sdfc, p_t, names
#Testing files input
ac, fc, allbad, sdfc, p_t, names = fit_ac_fc(abundance_file, counts_file, times)
test_suite()
print('Passed file inputs (implicit)')
# #Testing files input
# ac, fc, allbad, sdfc, p_t, names = fit_ac_fc(abundance_file, counts_file, times, method='explicit',
# n_reps=2,
# columns_map=[[0,1],[2,3],[4,5],[6,7]])
# test_suite()
    # print('Passed file inputs (explicit)')
#Testing dataframe input
counts_df = pd.read_csv(counts_file, sep='\s+')
ac, fc, allbad, sdfc, p_t, names = fit_ac_fc(abundance_file, counts_df, times)
test_suite()
print('Passed dataframe input (implicit)')
#Testing explicit method
# ac, fc, allbad, sdfc, p_t, names = fit_ac_fc(abundance_file, counts_df, times, method='explicit',
# n_reps=2,
# columns_map=[[0,1],[2,3],[4,5],[6,7]])
# test_suite()
    # print('Passed dataframe input (explicit)')
#TODO: Test changing the column order
| xfit1_df = pd.read_csv(filesList[10], index_col=0)
xfit2_df = pd.read_csv(filesList[11], index_col=0)
assert np.allclose(xfit1_df.as_matrix(), xfit[0])
assert np.allclose(xfit2_df.as_matrix(), xfit[1]) |
deleteCoreV1NamespacedEndpoints.py | import json
from lib.k8s import K8sClient
class deleteCoreV1NamespacedEndpoints(K8sClient):
def run(
self,
body,
name,
namespace,
gracePeriodSeconds=None,
orphanDependents=None,
pretty=None,
config_override=None):
ret = False
args = {}
args['config_override'] = {}
args['params'] = {}
if config_override is not None:
args['config_override'] = config_override
if body is not None:
args['body'] = body | if name is not None:
args['name'] = name
else:
return (False, "name is a required parameter")
if namespace is not None:
args['namespace'] = namespace
else:
return (False, "namespace is a required parameter")
if gracePeriodSeconds is not None:
args['params'].update({'gracePeriodSeconds': gracePeriodSeconds})
if orphanDependents is not None:
args['params'].update({'orphanDependents': orphanDependents})
if pretty is not None:
args['params'].update({'pretty': pretty})
if 'body' in args:
args['data'] = args['body']
args.pop('body')
args['headers'] = {'Content-type': u'application/json', 'Accept': u'application/json, application/yaml, application/vnd.kubernetes.protobuf'} # noqa pylint: disable=line-too-long
args['url'] = "api/v1/namespaces/{namespace}/endpoints/{name}".format( # noqa pylint: disable=line-too-long
body=body, name=name, namespace=namespace)
args['method'] = "delete"
self.addArgs(**args)
self.makeRequest()
myresp = {}
myresp['status_code'] = self.resp.status_code
try:
myresp['data'] = json.loads(self.resp.content.rstrip())
except ValueError:
myresp['data'] = self.resp.content
if myresp['status_code'] >= 200 and myresp['status_code'] <= 299:
ret = True
return (ret, myresp) | else:
return (False, "body is a required parameter") |
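# A hedged usage sketch for the action class above; it assumes K8sClient can be
# instantiated directly and picks up cluster credentials from its own config:
#
#   action = deleteCoreV1NamespacedEndpoints()
#   ok, resp = action.run(body={}, name="my-endpoints", namespace="default")
#   print(ok, resp['status_code'])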
standaloneLanguages.ts | /*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { CancellationToken } from 'vs/base/common/cancellation';
import { Color } from 'vs/base/common/color';
import { IDisposable } from 'vs/base/common/lifecycle';
import { Position } from 'vs/editor/common/core/position';
import { Range } from 'vs/editor/common/core/range';
import * as model from 'vs/editor/common/model';
import * as languages from 'vs/editor/common/languages';
import { LanguageConfiguration } from 'vs/editor/common/languages/languageConfiguration';
import { ILanguageConfigurationService } from 'vs/editor/common/languages/languageConfigurationRegistry';
import { ModesRegistry } from 'vs/editor/common/languages/modesRegistry';
import { ILanguageExtensionPoint, ILanguageService } from 'vs/editor/common/languages/language';
import * as standaloneEnums from 'vs/editor/common/standalone/standaloneEnums';
import { StandaloneServices } from 'vs/editor/standalone/browser/standaloneServices';
import { compile } from 'vs/editor/standalone/common/monarch/monarchCompile';
import { MonarchTokenizer } from 'vs/editor/standalone/common/monarch/monarchLexer';
import { IMonarchLanguage } from 'vs/editor/standalone/common/monarch/monarchTypes';
import { IStandaloneThemeService } from 'vs/editor/standalone/common/standaloneTheme';
import { IMarkerData, IMarkerService } from 'vs/platform/markers/common/markers';
import { ILanguageFeaturesService } from 'vs/editor/common/services/languageFeatures';
import { LanguageSelector } from 'vs/editor/common/languageSelector';
/**
* Register information about a new language.
*/
export function register(language: ILanguageExtensionPoint): void {
// Intentionally using the `ModesRegistry` here to avoid
// instantiating services too quickly in the standalone editor.
ModesRegistry.registerLanguage(language);
}
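// Usage sketch: make a hypothetical language id known to the editor before
// attaching any providers to it (the id and extension are illustrative):
//
//   register({ id: 'myLang', extensions: ['.mylang'] });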
/**
* Get the information of all the registered languages.
*/
export function getLanguages(): ILanguageExtensionPoint[] {
let result: ILanguageExtensionPoint[] = [];
result = result.concat(ModesRegistry.getLanguages());
return result;
}
export function getEncodedLanguageId(languageId: string): number {
const languageService = StandaloneServices.get(ILanguageService);
return languageService.languageIdCodec.encodeLanguageId(languageId);
}
/**
* An event emitted when a language is needed for the first time (e.g. a model has it set).
* @event
*/
export function onLanguage(languageId: string, callback: () => void): IDisposable {
const languageService = StandaloneServices.get(ILanguageService);
const disposable = languageService.onDidEncounterLanguage((encounteredLanguageId) => {
if (encounteredLanguageId === languageId) {
// stop listening
disposable.dispose();
// invoke actual listener
callback();
}
});
return disposable;
}
/**
* Set the editing configuration for a language.
*/
export function setLanguageConfiguration(languageId: string, configuration: LanguageConfiguration): IDisposable {
const languageService = StandaloneServices.get(ILanguageService);
if (!languageService.isRegisteredLanguageId(languageId)) {
throw new Error(`Cannot set configuration for unknown language ${languageId}`);
}
const languageConfigurationService = StandaloneServices.get(ILanguageConfigurationService);
return languageConfigurationService.register(languageId, configuration, 100);
}
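// Usage sketch: give the hypothetical 'myLang' basic comment and bracket
// support (the language must already be registered, or this throws):
//
//   setLanguageConfiguration('myLang', {
//     comments: { lineComment: '//' },
//     brackets: [['{', '}'], ['(', ')']],
//   });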
/**
* @internal
*/
export class EncodedTokenizationSupportAdapter implements languages.ITokenizationSupport {
private readonly _languageId: string;
private readonly _actual: EncodedTokensProvider;
constructor(languageId: string, actual: EncodedTokensProvider) {
this._languageId = languageId;
this._actual = actual;
}
public getInitialState(): languages.IState {
return this._actual.getInitialState();
}
public tokenize(line: string, hasEOL: boolean, state: languages.IState): languages.TokenizationResult {
if (typeof this._actual.tokenize === 'function') {
return TokenizationSupportAdapter.adaptTokenize(this._languageId, <{ tokenize(line: string, state: languages.IState): ILineTokens }>this._actual, line, state);
}
throw new Error('Not supported!');
}
public tokenizeEncoded(line: string, hasEOL: boolean, state: languages.IState): languages.EncodedTokenizationResult {
const result = this._actual.tokenizeEncoded(line, state);
return new languages.EncodedTokenizationResult(result.tokens, result.endState);
}
}
/**
* @internal
*/
export class TokenizationSupportAdapter implements languages.ITokenizationSupport {
constructor(
private readonly _languageId: string,
private readonly _actual: TokensProvider,
private readonly _languageService: ILanguageService,
private readonly _standaloneThemeService: IStandaloneThemeService,
) {
}
public getInitialState(): languages.IState {
return this._actual.getInitialState();
}
private static _toClassicTokens(tokens: IToken[], language: string): languages.Token[] {
const result: languages.Token[] = [];
let previousStartIndex: number = 0;
for (let i = 0, len = tokens.length; i < len; i++) {
const t = tokens[i];
let startIndex = t.startIndex;
// Prevent issues stemming from a buggy external tokenizer.
if (i === 0) {
// Force first token to start at first index!
startIndex = 0;
} else if (startIndex < previousStartIndex) {
// Force tokens to be after one another!
startIndex = previousStartIndex;
}
result[i] = new languages.Token(startIndex, t.scopes, language);
previousStartIndex = startIndex;
}
return result;
}
public static adaptTokenize(language: string, actual: { tokenize(line: string, state: languages.IState): ILineTokens }, line: string, state: languages.IState): languages.TokenizationResult {
const actualResult = actual.tokenize(line, state);
const tokens = TokenizationSupportAdapter._toClassicTokens(actualResult.tokens, language);
let endState: languages.IState;
// try to save an object if possible
if (actualResult.endState.equals(state)) {
endState = state;
} else {
endState = actualResult.endState;
}
return new languages.TokenizationResult(tokens, endState);
}
public tokenize(line: string, hasEOL: boolean, state: languages.IState): languages.TokenizationResult {
return TokenizationSupportAdapter.adaptTokenize(this._languageId, this._actual, line, state);
}
private _toBinaryTokens(languageIdCodec: languages.ILanguageIdCodec, tokens: IToken[]): Uint32Array {
const languageId = languageIdCodec.encodeLanguageId(this._languageId);
const tokenTheme = this._standaloneThemeService.getColorTheme().tokenTheme;
const result: number[] = [];
let resultLen = 0;
let previousStartIndex: number = 0;
for (let i = 0, len = tokens.length; i < len; i++) {
const t = tokens[i];
const metadata = tokenTheme.match(languageId, t.scopes);
if (resultLen > 0 && result[resultLen - 1] === metadata) {
// same metadata
continue;
}
let startIndex = t.startIndex;
// Prevent issues stemming from a buggy external tokenizer.
if (i === 0) {
// Force first token to start at first index!
startIndex = 0;
} else if (startIndex < previousStartIndex) {
// Force tokens to be after one another!
startIndex = previousStartIndex;
}
result[resultLen++] = startIndex;
result[resultLen++] = metadata;
previousStartIndex = startIndex;
}
const actualResult = new Uint32Array(resultLen);
for (let i = 0; i < resultLen; i++) {
actualResult[i] = result[i];
}
return actualResult;
}
public tokenizeEncoded(line: string, hasEOL: boolean, state: languages.IState): languages.EncodedTokenizationResult {
const actualResult = this._actual.tokenize(line, state);
const tokens = this._toBinaryTokens(this._languageService.languageIdCodec, actualResult.tokens);
let endState: languages.IState;
// try to save an object if possible
if (actualResult.endState.equals(state)) {
endState = state;
} else {
endState = actualResult.endState;
}
return new languages.EncodedTokenizationResult(tokens, endState);
}
}
/**
* A token.
*/
export interface IToken {
startIndex: number;
scopes: string;
}
/**
* The result of a line tokenization.
*/
export interface ILineTokens {
/**
* The list of tokens on the line.
*/
tokens: IToken[];
/**
* The tokenization end state.
* A pointer will be held to this and the object should not be modified by the tokenizer after the pointer is returned.
*/
endState: languages.IState;
}
/**
* The result of a line tokenization.
*/
export interface IEncodedLineTokens {
/**
* The tokens on the line in a binary, encoded format. Each token occupies two array indices. For token i:
* - at offset 2*i => startIndex
* - at offset 2*i + 1 => metadata
* Meta data is in binary format:
* - -------------------------------------------
* 3322 2222 2222 1111 1111 1100 0000 0000
* 1098 7654 3210 9876 5432 1098 7654 3210
* - -------------------------------------------
* bbbb bbbb bfff ffff ffFF FFTT LLLL LLLL
* - -------------------------------------------
* - L = EncodedLanguageId (8 bits): Use `getEncodedLanguageId` to get the encoded ID of a language.
* - T = StandardTokenType (2 bits): Other = 0, Comment = 1, String = 2, RegEx = 3.
* - F = FontStyle (4 bits): None = 0, Italic = 1, Bold = 2, Underline = 4, Strikethrough = 8.
* - f = foreground ColorId (9 bits)
* - b = background ColorId (9 bits)
* - The color value for each colorId is defined in IStandaloneThemeData.customTokenColors:
* e.g. colorId = 1 is stored in IStandaloneThemeData.customTokenColors[1]. Color id = 0 means no color,
* id = 1 is for the default foreground color, id = 2 for the default background.
*/
tokens: Uint32Array;
/**
* The tokenization end state.
* A pointer will be held to this and the object should not be modified by the tokenizer after the pointer is returned.
*/
endState: languages.IState;
}
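// A minimal sketch (not part of this API) of packing the metadata layout
// documented above; the shift amounts follow the bit diagram (L = 8 bits,
// T = 2, F = 4, foreground and background = 9 each), and all inputs are
// assumed to already be in range:
//
//   function encodeTokenMetadata(languageId: number, tokenType: number, fontStyle: number, foreground: number, background: number): number {
//     // >>> 0 keeps the packed value an unsigned 32-bit integer
//     return ((languageId << 0)
//       | (tokenType << 8)
//       | (fontStyle << 10)
//       | (foreground << 14)
//       | (background << 23)) >>> 0;
//   }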
/**
* A factory for token providers.
*/
export interface TokensProviderFactory {
create(): languages.ProviderResult<TokensProvider | EncodedTokensProvider | IMonarchLanguage>;
}
/**
* A "manual" provider of tokens.
*/
export interface TokensProvider {
/**
* The initial state of a language. Will be the state passed in to tokenize the first line.
*/
getInitialState(): languages.IState;
/**
* Tokenize a line given the state at the beginning of the line.
*/
tokenize(line: string, state: languages.IState): ILineTokens;
}
/**
* A "manual" provider of tokens, returning tokens in a binary form.
*/
export interface EncodedTokensProvider {
/**
* The initial state of a language. Will be the state passed in to tokenize the first line.
*/
getInitialState(): languages.IState;
/**
* Tokenize a line given the state at the beginning of the line.
*/
tokenizeEncoded(line: string, state: languages.IState): IEncodedLineTokens;
/**
* Tokenize a line given the state at the beginning of the line.
*/
tokenize?(line: string, state: languages.IState): ILineTokens;
}
function isATokensProvider(provider: TokensProvider | EncodedTokensProvider | IMonarchLanguage): provider is TokensProvider | EncodedTokensProvider {
return (typeof provider.getInitialState === 'function');
}
function isEncodedTokensProvider(provider: TokensProvider | EncodedTokensProvider): provider is EncodedTokensProvider {
return 'tokenizeEncoded' in provider;
}
function isThenable<T>(obj: any): obj is Thenable<T> {
return obj && typeof obj.then === 'function';
}
/**
* Change the color map that is used for token colors.
 * Supported formats (hex): #RRGGBB, #RRGGBBAA, #RGB, #RGBA
*/
export function setColorMap(colorMap: string[] | null): void {
const standaloneThemeService = StandaloneServices.get(IStandaloneThemeService);
if (colorMap) {
const result: Color[] = [null!];
for (let i = 1, len = colorMap.length; i < len; i++) {
result[i] = Color.fromHex(colorMap[i]);
}
standaloneThemeService.setColorMapOverride(result);
} else {
standaloneThemeService.setColorMapOverride(null);
}
}
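// Usage sketch: override token colors (index 0 is reserved and ignored by the
// loop above), then restore the theme-provided colors by passing null:
//
//   setColorMap(['#000000', '#1E1E1E', '#D4D4D4', '#FF0000AA']);
//   setColorMap(null);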
/**
* @internal
*/
function createTokenizationSupportAdapter(languageId: string, provider: TokensProvider | EncodedTokensProvider) {
if (isEncodedTokensProvider(provider)) {
return new EncodedTokenizationSupportAdapter(languageId, provider);
} else {
return new TokenizationSupportAdapter(
languageId,
provider,
StandaloneServices.get(ILanguageService),
StandaloneServices.get(IStandaloneThemeService),
);
}
}
/**
* Register a tokens provider factory for a language. This tokenizer will be exclusive with a tokenizer
* set using `setTokensProvider` or one created using `setMonarchTokensProvider`, but will work together
* with a tokens provider set using `registerDocumentSemanticTokensProvider` or `registerDocumentRangeSemanticTokensProvider`.
*/
export function registerTokensProviderFactory(languageId: string, factory: TokensProviderFactory): IDisposable {
const adaptedFactory: languages.ITokenizationSupportFactory = {
createTokenizationSupport: async (): Promise<languages.ITokenizationSupport | null> => {
const result = await Promise.resolve(factory.create());
if (!result) {
return null;
}
if (isATokensProvider(result)) {
return createTokenizationSupportAdapter(languageId, result);
}
return new MonarchTokenizer(StandaloneServices.get(ILanguageService), StandaloneServices.get(IStandaloneThemeService), languageId, compile(languageId, result));
}
};
return languages.TokenizationRegistry.registerFactory(languageId, adaptedFactory);
}
/**
* Set the tokens provider for a language (manual implementation). This tokenizer will be exclusive
* with a tokenizer created using `setMonarchTokensProvider`, or with `registerTokensProviderFactory`,
* but will work together with a tokens provider set using `registerDocumentSemanticTokensProvider`
* or `registerDocumentRangeSemanticTokensProvider`.
*/
export function setTokensProvider(languageId: string, provider: TokensProvider | EncodedTokensProvider | Thenable<TokensProvider | EncodedTokensProvider>): IDisposable {
const languageService = StandaloneServices.get(ILanguageService);
if (!languageService.isRegisteredLanguageId(languageId)) {
throw new Error(`Cannot set tokens provider for unknown language ${languageId}`);
}
if (isThenable<TokensProvider | EncodedTokensProvider>(provider)) {
return registerTokensProviderFactory(languageId, { create: () => provider });
}
return languages.TokenizationRegistry.register(languageId, createTokenizationSupportAdapter(languageId, provider));
}
/**
* Set the tokens provider for a language (monarch implementation). This tokenizer will be exclusive
* with a tokenizer set using `setTokensProvider`, or with `registerTokensProviderFactory`, but will
* work together with a tokens provider set using `registerDocumentSemanticTokensProvider` or
* `registerDocumentRangeSemanticTokensProvider`.
*/
export function setMonarchTokensProvider(languageId: string, languageDef: IMonarchLanguage | Thenable<IMonarchLanguage>): IDisposable {
const create = (languageDef: IMonarchLanguage) => {
return new MonarchTokenizer(StandaloneServices.get(ILanguageService), StandaloneServices.get(IStandaloneThemeService), languageId, compile(languageId, languageDef));
};
if (isThenable<IMonarchLanguage>(languageDef)) {
return registerTokensProviderFactory(languageId, { create: () => languageDef });
}
return languages.TokenizationRegistry.register(languageId, create(languageDef));
}
/**
* Register a reference provider (used by e.g. reference search).
*/
export function registerReferenceProvider(languageSelector: LanguageSelector, provider: languages.ReferenceProvider): IDisposable {
const languageFeaturesService = StandaloneServices.get(ILanguageFeaturesService);
return languageFeaturesService.referenceProvider.register(languageSelector, provider);
}
/**
* Register a rename provider (used by e.g. rename symbol).
*/
export function registerRenameProvider(languageSelector: LanguageSelector, provider: languages.RenameProvider): IDisposable {
const languageFeaturesService = StandaloneServices.get(ILanguageFeaturesService);
return languageFeaturesService.renameProvider.register(languageSelector, provider);
}
/**
* Register a signature help provider (used by e.g. parameter hints).
*/
export function registerSignatureHelpProvider(languageSelector: LanguageSelector, provider: languages.SignatureHelpProvider): IDisposable {
const languageFeaturesService = StandaloneServices.get(ILanguageFeaturesService);
return languageFeaturesService.signatureHelpProvider.register(languageSelector, provider);
}
/**
* Register a hover provider (used by e.g. editor hover).
*/
export function registerHoverProvider(languageSelector: LanguageSelector, provider: languages.HoverProvider): IDisposable {
const languageFeaturesService = StandaloneServices.get(ILanguageFeaturesService);
return languageFeaturesService.hoverProvider.register(languageSelector, {
provideHover: (model: model.ITextModel, position: Position, token: CancellationToken): Promise<languages.Hover | undefined> => {
const word = model.getWordAtPosition(position);
return Promise.resolve<languages.Hover | null | undefined>(provider.provideHover(model, position, token)).then((value): languages.Hover | undefined => {
if (!value) {
return undefined;
}
if (!value.range && word) {
value.range = new Range(position.lineNumber, word.startColumn, position.lineNumber, word.endColumn);
}
if (!value.range) {
value.range = new Range(position.lineNumber, position.column, position.lineNumber, position.column);
}
return value;
});
}
});
}
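// Usage sketch: a provider may omit `range`; the wrapper above falls back to
// the word under the cursor and finally to the cursor position itself:
//
//   registerHoverProvider('myLang', {
//     provideHover: (model, position) => ({
//       contents: [{ value: 'Hello from a hover!' }],
//     }),
//   });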
/**
* Register a document symbol provider (used by e.g. outline).
*/
export function registerDocumentSymbolProvider(languageSelector: LanguageSelector, provider: languages.DocumentSymbolProvider): IDisposable {
const languageFeaturesService = StandaloneServices.get(ILanguageFeaturesService);
return languageFeaturesService.documentSymbolProvider.register(languageSelector, provider);
}
/**
* Register a document highlight provider (used by e.g. highlight occurrences).
*/
export function registerDocumentHighlightProvider(languageSelector: LanguageSelector, provider: languages.DocumentHighlightProvider): IDisposable {
const languageFeaturesService = StandaloneServices.get(ILanguageFeaturesService);
return languageFeaturesService.documentHighlightProvider.register(languageSelector, provider);
}
/**
 * Register a linked editing range provider.
*/
export function registerLinkedEditingRangeProvider(languageSelector: LanguageSelector, provider: languages.LinkedEditingRangeProvider): IDisposable {
const languageFeaturesService = StandaloneServices.get(ILanguageFeaturesService);
return languageFeaturesService.linkedEditingRangeProvider.register(languageSelector, provider);
}
/**
* Register a definition provider (used by e.g. go to definition).
*/
export function registerDefinitionProvider(languageSelector: LanguageSelector, provider: languages.DefinitionProvider): IDisposable {
const languageFeaturesService = StandaloneServices.get(ILanguageFeaturesService);
return languageFeaturesService.definitionProvider.register(languageSelector, provider);
}
/**
 * Register an implementation provider (used by e.g. go to implementation).
*/
export function registerImplementationProvider(languageSelector: LanguageSelector, provider: languages.ImplementationProvider): IDisposable {
const languageFeaturesService = StandaloneServices.get(ILanguageFeaturesService);
return languageFeaturesService.implementationProvider.register(languageSelector, provider);
}
/**
* Register a type definition provider (used by e.g. go to type definition).
*/
export function registerTypeDefinitionProvider(languageSelector: LanguageSelector, provider: languages.TypeDefinitionProvider): IDisposable {
const languageFeaturesService = StandaloneServices.get(ILanguageFeaturesService);
return languageFeaturesService.typeDefinitionProvider.register(languageSelector, provider);
}
/**
* Register a code lens provider (used by e.g. inline code lenses).
*/
export function registerCodeLensProvider(languageSelector: LanguageSelector, provider: languages.CodeLensProvider): IDisposable {
const languageFeaturesService = StandaloneServices.get(ILanguageFeaturesService);
return languageFeaturesService.codeLensProvider.register(languageSelector, provider);
}
/**
* Register a code action provider (used by e.g. quick fix).
*/
export function registerCodeActionProvider(languageSelector: LanguageSelector, provider: CodeActionProvider, metadata?: CodeActionProviderMetadata): IDisposable {
const languageFeaturesService = StandaloneServices.get(ILanguageFeaturesService);
return languageFeaturesService.codeActionProvider.register(languageSelector, {
providedCodeActionKinds: metadata?.providedCodeActionKinds,
provideCodeActions: (model: model.ITextModel, range: Range, context: languages.CodeActionContext, token: CancellationToken): languages.ProviderResult<languages.CodeActionList> => {
const markerService = StandaloneServices.get(IMarkerService);
const markers = markerService.read({ resource: model.uri }).filter(m => {
return Range.areIntersectingOrTouching(m, range);
});
return provider.provideCodeActions(model, range, { markers, only: context.only }, token);
},
resolveCodeAction: provider.resolveCodeAction
});
}
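// Usage sketch: the wrapper above pre-filters markers to those touching the
// requested range, so a provider only sees relevant diagnostics:
//
//   registerCodeActionProvider('myLang', {
//     provideCodeActions: (model, range, context) => ({
//       actions: context.markers.map((m) => ({ title: `Surface: ${m.message}` })),
//       dispose: () => { },
//     }),
//   });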
/**
* Register a formatter that can handle only entire models.
*/
export function registerDocumentFormattingEditProvider(languageSelector: LanguageSelector, provider: languages.DocumentFormattingEditProvider): IDisposable {
const languageFeaturesService = StandaloneServices.get(ILanguageFeaturesService);
return languageFeaturesService.documentFormattingEditProvider.register(languageSelector, provider);
}
/**
* Register a formatter that can handle a range inside a model.
*/
export function registerDocumentRangeFormattingEditProvider(languageSelector: LanguageSelector, provider: languages.DocumentRangeFormattingEditProvider): IDisposable {
const languageFeaturesService = StandaloneServices.get(ILanguageFeaturesService);
return languageFeaturesService.documentRangeFormattingEditProvider.register(languageSelector, provider);
}
/**
 * Register a formatter that can do formatting as the user types.
*/
export function registerOnTypeFormattingEditProvider(languageSelector: LanguageSelector, provider: languages.OnTypeFormattingEditProvider): IDisposable {
const languageFeaturesService = StandaloneServices.get(ILanguageFeaturesService);
return languageFeaturesService.onTypeFormattingEditProvider.register(languageSelector, provider);
}
/**
* Register a link provider that can find links in text.
*/
export function registerLinkProvider(languageSelector: LanguageSelector, provider: languages.LinkProvider): IDisposable {
const languageFeaturesService = StandaloneServices.get(ILanguageFeaturesService);
return languageFeaturesService.linkProvider.register(languageSelector, provider);
}
/**
 * Register a completion item provider (used by e.g. suggestions).
*/
export function registerCompletionItemProvider(languageSelector: LanguageSelector, provider: languages.CompletionItemProvider): IDisposable {
const languageFeaturesService = StandaloneServices.get(ILanguageFeaturesService);
return languageFeaturesService.completionProvider.register(languageSelector, provider);
}
/**
* Register a document color provider (used by Color Picker, Color Decorator).
*/
export function registerColorProvider(languageSelector: LanguageSelector, provider: languages.DocumentColorProvider): IDisposable {
const languageFeaturesService = StandaloneServices.get(ILanguageFeaturesService);
return languageFeaturesService.colorProvider.register(languageSelector, provider);
}
/**
* Register a folding range provider
*/
export function registerFoldingRangeProvider(languageSelector: LanguageSelector, provider: languages.FoldingRangeProvider): IDisposable {
const languageFeaturesService = StandaloneServices.get(ILanguageFeaturesService);
return languageFeaturesService.foldingRangeProvider.register(languageSelector, provider);
}
/**
* Register a declaration provider
*/
export function registerDeclarationProvider(languageSelector: LanguageSelector, provider: languages.DeclarationProvider): IDisposable {
const languageFeaturesService = StandaloneServices.get(ILanguageFeaturesService);
return languageFeaturesService.declarationProvider.register(languageSelector, provider);
}
/**
* Register a selection range provider
*/
export function registerSelectionRangeProvider(languageSelector: LanguageSelector, provider: languages.SelectionRangeProvider): IDisposable {
const languageFeaturesService = StandaloneServices.get(ILanguageFeaturesService);
return languageFeaturesService.selectionRangeProvider.register(languageSelector, provider);
}
/**
* Register a document semantic tokens provider. A semantic tokens provider will complement and enhance a
* simple top-down tokenizer. Simple top-down tokenizers can be set either via `setMonarchTokensProvider`
* or `setTokensProvider`.
*
* For the best user experience, register both a semantic tokens provider and a top-down tokenizer.
*/
export function registerDocumentSemanticTokensProvider(languageSelector: LanguageSelector, provider: languages.DocumentSemanticTokensProvider): IDisposable {
const languageFeaturesService = StandaloneServices.get(ILanguageFeaturesService);
return languageFeaturesService.documentSemanticTokensProvider.register(languageSelector, provider);
}
/**
* Register a document range semantic tokens provider. A semantic tokens provider will complement and enhance a
* simple top-down tokenizer. Simple top-down tokenizers can be set either via `setMonarchTokensProvider`
* or `setTokensProvider`.
*
* For the best user experience, register both a semantic tokens provider and a top-down tokenizer.
*/
export function registerDocumentRangeSemanticTokensProvider(languageSelector: LanguageSelector, provider: languages.DocumentRangeSemanticTokensProvider): IDisposable {
const languageFeaturesService = StandaloneServices.get(ILanguageFeaturesService);
return languageFeaturesService.documentRangeSemanticTokensProvider.register(languageSelector, provider);
}
/**
* Register an inline completions provider.
*/
export function registerInlineCompletionsProvider(languageSelector: LanguageSelector, provider: languages.InlineCompletionsProvider): IDisposable {
const languageFeaturesService = StandaloneServices.get(ILanguageFeaturesService);
return languageFeaturesService.inlineCompletionsProvider.register(languageSelector, provider);
}
/**
* Register an inlay hints provider.
*/
export function registerInlayHintsProvider(languageSelector: LanguageSelector, provider: languages.InlayHintsProvider): IDisposable {
const languageFeaturesService = StandaloneServices.get(ILanguageFeaturesService);
return languageFeaturesService.inlayHintsProvider.register(languageSelector, provider);
}
/**
* Contains additional diagnostic information about the context in which
* a [code action](#CodeActionProvider.provideCodeActions) is run.
*/
export interface CodeActionContext {
/**
* An array of diagnostics.
*/
readonly markers: IMarkerData[];
/**
* Requested kind of actions to return.
*/
readonly only?: string;
}
/**
* The code action interface defines the contract between extensions and
* the [light bulb](https://code.visualstudio.com/docs/editor/editingevolved#_code-action) feature.
*/
export interface CodeActionProvider {
/**
* Provide commands for the given document and range.
*/
provideCodeActions(model: model.ITextModel, range: Range, context: CodeActionContext, token: CancellationToken): languages.ProviderResult<languages.CodeActionList>;
/**
* Given a code action fill in the edit. Will only invoked when missing.
*/
resolveCodeAction?(codeAction: languages.CodeAction, token: CancellationToken): languages.ProviderResult<languages.CodeAction>;
}
/**
* Metadata about the type of code actions that a {@link CodeActionProvider} provides.
*/
export interface CodeActionProviderMetadata {
/**
* List of code action kinds that a {@link CodeActionProvider} may return.
*
* This list is used to determine if a given `CodeActionProvider` should be invoked or not.
 * To avoid unnecessary computation, every `CodeActionProvider` should list its `providedCodeActionKinds`. The
* list of kinds may either be generic, such as `["quickfix", "refactor", "source"]`, or list out every kind provided,
* such as `["quickfix.removeLine", "source.fixAll" ...]`.
*/
readonly providedCodeActionKinds?: readonly string[];
}
/**
* @internal
*/
export function createMonacoLanguagesAPI(): typeof monaco.languages {
return {
register: <any>register,
getLanguages: <any>getLanguages,
onLanguage: <any>onLanguage,
getEncodedLanguageId: <any>getEncodedLanguageId,
// provider methods
setLanguageConfiguration: <any>setLanguageConfiguration,
setColorMap: setColorMap,
registerTokensProviderFactory: <any>registerTokensProviderFactory,
setTokensProvider: <any>setTokensProvider,
setMonarchTokensProvider: <any>setMonarchTokensProvider,
registerReferenceProvider: <any>registerReferenceProvider,
registerRenameProvider: <any>registerRenameProvider,
registerCompletionItemProvider: <any>registerCompletionItemProvider,
registerSignatureHelpProvider: <any>registerSignatureHelpProvider,
registerHoverProvider: <any>registerHoverProvider,
registerDocumentSymbolProvider: <any>registerDocumentSymbolProvider,
registerDocumentHighlightProvider: <any>registerDocumentHighlightProvider,
registerLinkedEditingRangeProvider: <any>registerLinkedEditingRangeProvider,
registerDefinitionProvider: <any>registerDefinitionProvider,
registerImplementationProvider: <any>registerImplementationProvider,
registerTypeDefinitionProvider: <any>registerTypeDefinitionProvider,
registerCodeLensProvider: <any>registerCodeLensProvider,
registerCodeActionProvider: <any>registerCodeActionProvider,
registerDocumentFormattingEditProvider: <any>registerDocumentFormattingEditProvider,
registerDocumentRangeFormattingEditProvider: <any>registerDocumentRangeFormattingEditProvider,
registerOnTypeFormattingEditProvider: <any>registerOnTypeFormattingEditProvider,
registerLinkProvider: <any>registerLinkProvider,
registerColorProvider: <any>registerColorProvider,
registerFoldingRangeProvider: <any>registerFoldingRangeProvider,
registerDeclarationProvider: <any>registerDeclarationProvider,
registerSelectionRangeProvider: <any>registerSelectionRangeProvider,
registerDocumentSemanticTokensProvider: <any>registerDocumentSemanticTokensProvider,
registerDocumentRangeSemanticTokensProvider: <any>registerDocumentRangeSemanticTokensProvider,
registerInlineCompletionsProvider: <any>registerInlineCompletionsProvider,
registerInlayHintsProvider: <any>registerInlayHintsProvider,
// enums
DocumentHighlightKind: standaloneEnums.DocumentHighlightKind,
CompletionItemKind: standaloneEnums.CompletionItemKind,
CompletionItemTag: standaloneEnums.CompletionItemTag,
CompletionItemInsertTextRule: standaloneEnums.CompletionItemInsertTextRule,
SymbolKind: standaloneEnums.SymbolKind,
SymbolTag: standaloneEnums.SymbolTag,
IndentAction: standaloneEnums.IndentAction,
CompletionTriggerKind: standaloneEnums.CompletionTriggerKind,
SignatureHelpTriggerKind: standaloneEnums.SignatureHelpTriggerKind,
InlayHintKind: standaloneEnums.InlayHintKind,
InlineCompletionTriggerKind: standaloneEnums.InlineCompletionTriggerKind,
// classes
FoldingRangeKind: languages.FoldingRangeKind,
};
}
streamingPolicy.go | // *** WARNING: this file was generated by the Pulumi SDK Generator. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***
package v20210601
import (
"context"
"reflect"
"github.com/pkg/errors"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
type StreamingPolicy struct {
pulumi.CustomResourceState
CommonEncryptionCbcs CommonEncryptionCbcsResponsePtrOutput `pulumi:"commonEncryptionCbcs"`
CommonEncryptionCenc CommonEncryptionCencResponsePtrOutput `pulumi:"commonEncryptionCenc"`
Created pulumi.StringOutput `pulumi:"created"`
DefaultContentKeyPolicyName pulumi.StringPtrOutput `pulumi:"defaultContentKeyPolicyName"`
EnvelopeEncryption EnvelopeEncryptionResponsePtrOutput `pulumi:"envelopeEncryption"`
Name pulumi.StringOutput `pulumi:"name"`
NoEncryption NoEncryptionResponsePtrOutput `pulumi:"noEncryption"`
SystemData SystemDataResponseOutput `pulumi:"systemData"`
Type pulumi.StringOutput `pulumi:"type"`
}
// NewStreamingPolicy registers a new resource with the given unique name, arguments, and options.
func NewStreamingPolicy(ctx *pulumi.Context,
name string, args *StreamingPolicyArgs, opts ...pulumi.ResourceOption) (*StreamingPolicy, error) {
if args == nil {
return nil, errors.New("missing one or more required arguments")
}
if args.AccountName == nil {
return nil, errors.New("invalid value for required argument 'AccountName'")
}
if args.ResourceGroupName == nil {
return nil, errors.New("invalid value for required argument 'ResourceGroupName'")
}
aliases := pulumi.Aliases([]pulumi.Alias{
{
Type: pulumi.String("azure-nextgen:media/v20210601:StreamingPolicy"),
},
{
Type: pulumi.String("azure-native:media:StreamingPolicy"),
},
{
Type: pulumi.String("azure-nextgen:media:StreamingPolicy"),
},
{
Type: pulumi.String("azure-native:media/v20180330preview:StreamingPolicy"),
},
{
Type: pulumi.String("azure-nextgen:media/v20180330preview:StreamingPolicy"),
},
{
Type: pulumi.String("azure-native:media/v20180601preview:StreamingPolicy"),
},
{
Type: pulumi.String("azure-nextgen:media/v20180601preview:StreamingPolicy"),
},
{
Type: pulumi.String("azure-native:media/v20180701:StreamingPolicy"), | },
{
Type: pulumi.String("azure-nextgen:media/v20180701:StreamingPolicy"),
},
{
Type: pulumi.String("azure-native:media/v20200501:StreamingPolicy"),
},
{
Type: pulumi.String("azure-nextgen:media/v20200501:StreamingPolicy"),
},
})
opts = append(opts, aliases)
var resource StreamingPolicy
err := ctx.RegisterResource("azure-native:media/v20210601:StreamingPolicy", name, args, &resource, opts...)
if err != nil {
return nil, err
}
return &resource, nil
}
// GetStreamingPolicy gets an existing StreamingPolicy resource's state with the given name, ID, and optional
// state properties that are used to uniquely qualify the lookup (nil if not required).
func GetStreamingPolicy(ctx *pulumi.Context,
name string, id pulumi.IDInput, state *StreamingPolicyState, opts ...pulumi.ResourceOption) (*StreamingPolicy, error) {
var resource StreamingPolicy
err := ctx.ReadResource("azure-native:media/v20210601:StreamingPolicy", name, id, state, &resource, opts...)
if err != nil {
return nil, err
}
return &resource, nil
}
// Input properties used for looking up and filtering StreamingPolicy resources.
type streamingPolicyState struct {
}
type StreamingPolicyState struct {
}
func (StreamingPolicyState) ElementType() reflect.Type {
return reflect.TypeOf((*streamingPolicyState)(nil)).Elem()
}
type streamingPolicyArgs struct {
AccountName string `pulumi:"accountName"`
CommonEncryptionCbcs *CommonEncryptionCbcs `pulumi:"commonEncryptionCbcs"`
CommonEncryptionCenc *CommonEncryptionCenc `pulumi:"commonEncryptionCenc"`
DefaultContentKeyPolicyName *string `pulumi:"defaultContentKeyPolicyName"`
EnvelopeEncryption *EnvelopeEncryption `pulumi:"envelopeEncryption"`
NoEncryption *NoEncryption `pulumi:"noEncryption"`
ResourceGroupName string `pulumi:"resourceGroupName"`
StreamingPolicyName *string `pulumi:"streamingPolicyName"`
}
// The set of arguments for constructing a StreamingPolicy resource.
type StreamingPolicyArgs struct {
AccountName pulumi.StringInput
CommonEncryptionCbcs CommonEncryptionCbcsPtrInput
CommonEncryptionCenc CommonEncryptionCencPtrInput
DefaultContentKeyPolicyName pulumi.StringPtrInput
EnvelopeEncryption EnvelopeEncryptionPtrInput
NoEncryption NoEncryptionPtrInput
ResourceGroupName pulumi.StringInput
StreamingPolicyName pulumi.StringPtrInput
}
func (StreamingPolicyArgs) ElementType() reflect.Type {
return reflect.TypeOf((*streamingPolicyArgs)(nil)).Elem()
}
type StreamingPolicyInput interface {
pulumi.Input
ToStreamingPolicyOutput() StreamingPolicyOutput
ToStreamingPolicyOutputWithContext(ctx context.Context) StreamingPolicyOutput
}
func (*StreamingPolicy) ElementType() reflect.Type {
return reflect.TypeOf((*StreamingPolicy)(nil))
}
func (i *StreamingPolicy) ToStreamingPolicyOutput() StreamingPolicyOutput {
return i.ToStreamingPolicyOutputWithContext(context.Background())
}
func (i *StreamingPolicy) ToStreamingPolicyOutputWithContext(ctx context.Context) StreamingPolicyOutput {
return pulumi.ToOutputWithContext(ctx, i).(StreamingPolicyOutput)
}
type StreamingPolicyOutput struct{ *pulumi.OutputState }
func (StreamingPolicyOutput) ElementType() reflect.Type {
return reflect.TypeOf((*StreamingPolicy)(nil))
}
func (o StreamingPolicyOutput) ToStreamingPolicyOutput() StreamingPolicyOutput {
return o
}
func (o StreamingPolicyOutput) ToStreamingPolicyOutputWithContext(ctx context.Context) StreamingPolicyOutput {
return o
}
func init() {
pulumi.RegisterOutputType(StreamingPolicyOutput{})
} | |
crc.rs | #[cfg_attr(feature = "cargo-clippy", allow(unreadable_literal))]
pub const CRC32_TABLE: &[u32] = &[
0x00000000, 0xB71DC104, 0x6E3B8209, 0xD926430D, 0xDC760413, 0x6B6BC517, 0xB24D861A, 0x0550471E,
0xB8ED0826, 0x0FF0C922, 0xD6D68A2F, 0x61CB4B2B, 0x649B0C35, 0xD386CD31, 0x0AA08E3C, 0xBDBD4F38,
0x70DB114C, 0xC7C6D048, 0x1EE09345, 0xA9FD5241, 0xACAD155F, 0x1BB0D45B, 0xC2969756, 0x758B5652,
0xC836196A, 0x7F2BD86E, 0xA60D9B63, 0x11105A67, 0x14401D79, 0xA35DDC7D, 0x7A7B9F70, 0xCD665E74,
0xE0B62398, 0x57ABE29C, 0x8E8DA191, 0x39906095, 0x3CC0278B, 0x8BDDE68F, 0x52FBA582, 0xE5E66486,
0x585B2BBE, 0xEF46EABA, 0x3660A9B7, 0x817D68B3, 0x842D2FAD, 0x3330EEA9, 0xEA16ADA4, 0x5D0B6CA0,
0x906D32D4, 0x2770F3D0, 0xFE56B0DD, 0x494B71D9, 0x4C1B36C7, 0xFB06F7C3, 0x2220B4CE, 0x953D75CA,
0x28803AF2, 0x9F9DFBF6, 0x46BBB8FB, 0xF1A679FF, 0xF4F63EE1, 0x43EBFFE5, 0x9ACDBCE8, 0x2DD07DEC,
0x77708634, 0xC06D4730, 0x194B043D, 0xAE56C539, 0xAB068227, 0x1C1B4323, 0xC53D002E, 0x7220C12A,
0xCF9D8E12, 0x78804F16, 0xA1A60C1B, 0x16BBCD1F, 0x13EB8A01, 0xA4F64B05, 0x7DD00808, 0xCACDC90C,
0x07AB9778, 0xB0B6567C, 0x69901571, 0xDE8DD475, 0xDBDD936B, 0x6CC0526F, 0xB5E61162, 0x02FBD066,
0xBF469F5E, 0x085B5E5A, 0xD17D1D57, 0x6660DC53, 0x63309B4D, 0xD42D5A49, 0x0D0B1944, 0xBA16D840,
0x97C6A5AC, 0x20DB64A8, 0xF9FD27A5, 0x4EE0E6A1, 0x4BB0A1BF, 0xFCAD60BB, 0x258B23B6, 0x9296E2B2,
0x2F2BAD8A, 0x98366C8E, 0x41102F83, 0xF60DEE87, 0xF35DA999, 0x4440689D, 0x9D662B90, 0x2A7BEA94,
0xE71DB4E0, 0x500075E4, 0x892636E9, 0x3E3BF7ED, 0x3B6BB0F3, 0x8C7671F7, 0x555032FA, 0xE24DF3FE,
0x5FF0BCC6, 0xE8ED7DC2, 0x31CB3ECF, 0x86D6FFCB, 0x8386B8D5, 0x349B79D1, 0xEDBD3ADC, 0x5AA0FBD8,
0xEEE00C69, 0x59FDCD6D, 0x80DB8E60, 0x37C64F64, 0x3296087A, 0x858BC97E, 0x5CAD8A73, 0xEBB04B77,
0x560D044F, 0xE110C54B, 0x38368646, 0x8F2B4742, 0x8A7B005C, 0x3D66C158, 0xE4408255, 0x535D4351,
0x9E3B1D25, 0x2926DC21, 0xF0009F2C, 0x471D5E28, 0x424D1936, 0xF550D832, 0x2C769B3F, 0x9B6B5A3B,
0x26D61503, 0x91CBD407, 0x48ED970A, 0xFFF0560E, 0xFAA01110, 0x4DBDD014, 0x949B9319, 0x2386521D,
0x0E562FF1, 0xB94BEEF5, 0x606DADF8, 0xD7706CFC, 0xD2202BE2, 0x653DEAE6, 0xBC1BA9EB, 0x0B0668EF,
0xB6BB27D7, 0x01A6E6D3, 0xD880A5DE, 0x6F9D64DA, 0x6ACD23C4, 0xDDD0E2C0, 0x04F6A1CD, 0xB3EB60C9,
0x7E8D3EBD, 0xC990FFB9, 0x10B6BCB4, 0xA7AB7DB0, 0xA2FB3AAE, 0x15E6FBAA, 0xCCC0B8A7, 0x7BDD79A3,
0xC660369B, 0x717DF79F, 0xA85BB492, 0x1F467596, 0x1A163288, 0xAD0BF38C, 0x742DB081, 0xC3307185,
0x99908A5D, 0x2E8D4B59, 0xF7AB0854, 0x40B6C950, 0x45E68E4E, 0xF2FB4F4A, 0x2BDD0C47, 0x9CC0CD43,
0x217D827B, 0x9660437F, 0x4F460072, 0xF85BC176, 0xFD0B8668, 0x4A16476C, 0x93300461, 0x242DC565,
0xE94B9B11, 0x5E565A15, 0x87701918, 0x306DD81C, 0x353D9F02, 0x82205E06, 0x5B061D0B, 0xEC1BDC0F,
0x51A69337, 0xE6BB5233, 0x3F9D113E, 0x8880D03A, 0x8DD09724, 0x3ACD5620, 0xE3EB152D, 0x54F6D429,
0x7926A9C5, 0xCE3B68C1, 0x171D2BCC, 0xA000EAC8, 0xA550ADD6, 0x124D6CD2, 0xCB6B2FDF, 0x7C76EEDB,
0xC1CBA1E3, 0x76D660E7, 0xAFF023EA, 0x18EDE2EE, 0x1DBDA5F0, 0xAAA064F4, 0x738627F9, 0xC49BE6FD,
0x09FDB889, 0xBEE0798D, 0x67C63A80, 0xD0DBFB84, 0xD58BBC9A, 0x62967D9E, 0xBBB03E93, 0x0CADFF97,
0xB110B0AF, 0x060D71AB, 0xDF2B32A6, 0x6836F3A2, 0x6D66B4BC, 0xDA7B75B8, 0x035D36B5, 0xB440F7B1,
0x00000001,
];
#[derive(Debug)]
pub struct Crc32(u32);
impl Crc32 {
pub fn new() -> Self {
Crc32(0xFFFF_FFFF)
}
pub fn update(&mut self, data: &[u8]) {
for b in data {
let i = self.0 as u8;
self.0 = CRC32_TABLE[(i ^ b) as usize] ^ (self.0 >> 8);
}
}
pub fn value(&self) -> u32 {
self.0.swap_bytes()
}
}
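// Usage sketch: feed data incrementally, then read the final checksum
// (`value` byte-swaps the internal state):
//
//   let mut crc = Crc32::new();
//   crc.update(b"123456789");
//   let checksum = crc.value();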
farewells.rs | // english/farewells.rs
pub fn goodbye() {
println!("Goodbye!")
}
pub fn see_you() {
println!("See you!")
}
hotpotqa_utils_joint.py | import torch
import numpy as np
import json, sys, re, string
import collections
from collections import Counter
from collections import OrderedDict
def get_sp_pred(pred_sp_idx, data):
"""get the prediction of supporting facts in original format
Arguments:
pred_sp_idx {[type]} -- [description]
data {[type]} -- [description]
"""
pred = []
for p in pred_sp_idx:
if p < len(data):
pred.append([data[p].doc_title[0], data[p].sent_id])
return pred
def process_logit(batch_index, batch_logits, predict_features, predict_examples, max_answer_length):
"""get predictions for each sample in the batch
Arguments:
batch_index {[type]} -- [description]
batch_logits {[type]} -- 0: supporting facts logits, 1: answer span logits, 2: answer type logits 3: gold doc logits
batch_size {[type]} -- [description]
predict_file {[type]} -- [description]
"""
sp_logits_np = torch.sigmoid(batch_logits[0]).detach().cpu().numpy()
ans_type_logits_np = batch_logits[1].detach().cpu().numpy()
batch_index = batch_index.numpy().tolist()
sp_pred, span_pred, ans_type_pred = [], [], []
for idx, data in enumerate(batch_index):
# supporting facts prediction
pred_sp_idx = [ x[0] for x in enumerate(sp_logits_np[idx,:].tolist()) if x[1] > 0.5 ]
print(pred_sp_idx)
if len(pred_sp_idx) != 0:
sp_pred.append(get_sp_pred(pred_sp_idx, predict_examples[data]))
else:
sp_pred.append([])
# answer type prediction, for debug purpose
ans_type_pred.append(np.argmax(ans_type_logits_np[idx,:]))
# answer span prediction
if ans_type_pred[-1] == 0:
span_pred.append("no")
elif ans_type_pred[-1] == 1:
span_pred.append("yes")
else:
span_pred.append("")
return sp_pred, span_pred, ans_type_pred
# def evaluate(eval_file, answer_dict):
# f1 = exact_match = total = 0
# for key, value in enumerate(answer_dict):
# total += 1
# ground_truths = eval_file[key]["answer"]
# prediction = value
# cur_EM = exact_match_score(prediction, ground_truths)
# cur_f1, _, _ = f1_score(prediction, ground_truths)
# exact_match += cur_EM
# f1 += cur_f1
# exact_match = 100.0 * exact_match / total
# f1 = 100.0 * f1 / total
# return {'exact_match': exact_match, 'f1': f1}
def normalize_answer(s):
def remove_articles(text):
return re.sub(r'\b(a|an|the)\b', ' ', text)
def white_space_fix(text):
return ' '.join(text.split())
def remove_punc(text):
exclude = set(string.punctuation)
return ''.join(ch for ch in text if ch not in exclude)
def lower(text):
return text.lower()
return white_space_fix(remove_articles(remove_punc(lower(s))))
def f1_score(prediction, ground_truth):
normalized_prediction = normalize_answer(prediction)
normalized_ground_truth = normalize_answer(ground_truth)
ZERO_METRIC = (0, 0, 0)
if normalized_prediction in ['yes', 'no', 'noanswer'] and normalized_prediction != normalized_ground_truth:
return ZERO_METRIC
if normalized_ground_truth in ['yes', 'no', 'noanswer'] and normalized_prediction != normalized_ground_truth:
return ZERO_METRIC
prediction_tokens = normalized_prediction.split()
ground_truth_tokens = normalized_ground_truth.split()
common = Counter(prediction_tokens) & Counter(ground_truth_tokens)
num_same = sum(common.values())
if num_same == 0:
return ZERO_METRIC
precision = 1.0 * num_same / len(prediction_tokens)
recall = 1.0 * num_same / len(ground_truth_tokens)
f1 = (2 * precision * recall) / (precision + recall)
return f1, precision, recall
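# Worked example: prediction "the cat sat" vs ground truth "cat sat down"
# normalize to "cat sat" and "cat sat down"; two tokens overlap, so
# precision = 2/2 = 1.0, recall = 2/3, and F1 = 2 * 1.0 * (2/3) / (1.0 + 2/3) = 0.8.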
def exact_match_score(prediction, ground_truth):
return (normalize_answer(prediction) == normalize_answer(ground_truth))
def write_prediction(sp_preds, answer_preds, orig_data, predict_file, output_dir):
"""write predictions to json file
Arguments:
sp_preds {[type]} -- [description]
answer_preds {[type]} -- [description]
orig_data {[type]} -- [description]
predict_file {[type]} -- [description]
output_dir {[type]} -- [description]
"""
if len(answer_preds) == 0:
answer_preds = ["place_holder"] * len(orig_data)
all_pred = {}
all_pred['answer'] = OrderedDict()
all_pred['sp'] = OrderedDict()
for idx, data in enumerate(orig_data):
all_pred['answer'][data['_id']] = answer_preds[idx]
all_pred['sp'][data['_id']] = sp_preds[idx]
with open(output_dir, 'w') as fid:
json.dump(all_pred, fid)
filetwo.py | # -*- coding: utf-8 -*-
import sys
sys.path.append('./')
from lazyconfig import lazyconfig
def get_name():
return lazyconfig.config.name
main_test.go | package github
import (
"path"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/updatecli/updatecli/pkg/core/tmp"
)
func TestNew(t *testing.T) {
tests := []struct {
name string
spec Spec
want Github
wantErr bool
}{
{
name: "Nominal case",
spec: Spec{
Branch: "main",
Repository: "updatecli",
Owner: "updatecli",
Directory: "/home/updatecli",
Username: "joe",
Token: "superSecretTOkenOfJoe",
URL: "github.com",
},
want: Github{
Spec: Spec{
Branch: "main",
Repository: "updatecli",
Owner: "updatecli",
Directory: "/home/updatecli",
Username: "joe",
Token: "superSecretTOkenOfJoe",
URL: "github.com",
},
},
},
{
name: "Nominal case with empty directory",
spec: Spec{
Branch: "main",
Repository: "updatecli",
Owner: "updatecli",
Username: "joe",
Token: "superSecretTOkenOfJoe",
URL: "github.com",
},
want: Github{
Spec: Spec{
Branch: "main",
Repository: "updatecli",
Owner: "updatecli",
Username: "joe",
Token: "superSecretTOkenOfJoe",
URL: "github.com",
Directory: path.Join(tmp.Directory, "updatecli", "updatecli"),
},
},
},
{
name: "Nominal case with empty URL",
spec: Spec{
Branch: "main",
Repository: "updatecli",
Owner: "updatecli",
Username: "joe",
Token: "superSecretTOkenOfJoe",
Directory: "/home/updatecli",
},
want: Github{
Spec: Spec{
Branch: "main",
Repository: "updatecli",
Owner: "updatecli",
Username: "joe",
Token: "superSecretTOkenOfJoe",
URL: "github.com",
Directory: "/home/updatecli",
},
},
},
{
name: "Validation Error (missing token)",
spec: Spec{
Branch: "main",
Repository: "updatecli",
Owner: "updatecli",
Directory: "/tmp/updatecli",
Username: "joe",
},
wantErr: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got, err := New(tt.spec)
if tt.wantErr {
assert.Error(t, err)
return
}
require.NoError(t, err)
assert.Equal(t, tt.want.Spec, got.Spec)
assert.NotNil(t, got.client)
})
}
}
cli.rs | // * This file is part of the uutils coreutils package.
// *
// * (c) 2014 T. Jameson Little <[email protected]>
// * (c) 2020 nicoo <[email protected]>
// *
// * For the full copyright and license information, please view the LICENSE file
// * that was distributed with this source code.
#[macro_use]
extern crate uucore;
use std::error::Error;
use std::fmt::Write as FmtWrite;
use std::io::{self, stdin, stdout, BufRead, Write};
mod factor;
use clap::{crate_version, Arg, Command};
pub use factor::*;
use uucore::display::Quotable;
use uucore::error::UResult;
mod miller_rabin;
pub mod numeric;
mod rho;
pub mod table;
static SUMMARY: &str = "Print the prime factors of the given NUMBER(s).
If none are specified, read from standard input.";
mod options {
pub static NUMBER: &str = "NUMBER";
}
fn print_factors_str(
num_str: &str,
w: &mut io::BufWriter<impl io::Write>,
factors_buffer: &mut String,
) -> Result<(), Box<dyn Error>> {
num_str.parse::<u64>().map_err(|e| e.into()).and_then(|x| {
factors_buffer.clear();
writeln!(factors_buffer, "{}:{}", x, factor(x))?;
w.write_all(factors_buffer.as_bytes())?;
Ok(())
})
}
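// Example behavior (illustrative, not a verified transcript): invoking the
// utility as `factor 12 15` writes one line per input, e.g. `12: 2 2 3`,
// with the exact spacing determined by the Display impl behind `factor(x)`.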
#[uucore::main]
pub fn uumain(args: impl uucore::Args) -> UResult<()> {
let matches = uu_app().get_matches_from(args);
let stdout = stdout();
// We use a smaller buffer here to pass a gnu test. 4KiB appears to be the default pipe size for bash.
let mut w = io::BufWriter::with_capacity(4 * 1024, stdout.lock());
let mut factors_buffer = String::new();
if let Some(values) = matches.values_of(options::NUMBER) {
for number in values {
if let Err(e) = print_factors_str(number, &mut w, &mut factors_buffer) {
show_warning!("{}: {}", number.maybe_quote(), e);
}
}
} else {
let stdin = stdin();
for line in stdin.lock().lines() {
for number in line.unwrap().split_whitespace() {
if let Err(e) = print_factors_str(number, &mut w, &mut factors_buffer) {
show_warning!("{}: {}", number.maybe_quote(), e);
}
}
}
}
if let Err(e) = w.flush() {
show_error!("{}", e);
}
Ok(())
}
pub fn uu_app<'a>() -> Command<'a> {
Command::new(uucore::util_name())
.version(crate_version!())
.about(SUMMARY)
.infer_long_args(true)
.arg(Arg::new(options::NUMBER).multiple_occurrences(true))
}
axios.ts | // (C) 2019-2020 GoodData Corporation
import a, { AxiosInstance, AxiosRequestConfig } from "axios";
/**
* Default config from axios sets request headers:
*
* For all methods:
* Accept: application/json, text/html
*
* For POST and PUT of JS objects (isObject):
* Content-Type: application/json;charset=utf8
*
 * Setting the default Content-Type to application/json;charset=utf8 means it is sent regardless of the
 * data, as the backend can only accept JSON anyway.
*/
const _CONFIG: AxiosRequestConfig = {
maxContentLength: -1,
headers: {
post: {
"Content-Type": "application/json;charset=utf8",
},
put: {
"Content-Type": "application/json;charset=utf8",
},
},
};
/**
* Returns an instance of axios with default configuration.
*/
export const axios: AxiosInstance = a.create(_CONFIG);
/**
* Creates a new instance of axios.
*
* @param baseUrl - hostname, optional, will default to current origin
* @param headers - object mapping header name -\> header value
* @returns always new instance
*/
export function newAxios(baseUrl?: string, headers?: { [name: string]: string }): AxiosInstance {
const config: AxiosRequestConfig = _CONFIG;
if (baseUrl) {
config.baseURL = baseUrl;
}
if (headers) {
config.headers = {
common: headers,
...config.headers,
};
}
return a.create(config);
}
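// Usage sketch: build a client against a hypothetical host with an extra
// header applied to every request:
//
//   const client = newAxios("https://backend.example.com", { "X-Custom": "1" });
//   client.get("/api/resource").then((res) => console.log(res.status));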
runnables.rs | use std::fmt;
use ast::NameOwner;
use cfg::CfgExpr;
use either::Either;
use hir::{AsAssocItem, HasAttrs, HasSource, HirDisplay, Semantics};
use ide_assists::utils::test_related_attribute;
use ide_db::{
base_db::{FilePosition, FileRange},
helpers::visit_file_defs,
search::SearchScope,
RootDatabase, SymbolKind,
};
use itertools::Itertools;
use rustc_hash::FxHashSet;
use syntax::ast::{self, AstNode, AttrsOwner};
use crate::{
display::{ToNav, TryToNav},
references, FileId, NavigationTarget,
};
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct Runnable {
pub nav: NavigationTarget,
pub kind: RunnableKind,
pub cfg: Option<CfgExpr>,
}
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub enum TestId {
Name(String),
Path(String),
}
impl fmt::Display for TestId {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
TestId::Name(name) => write!(f, "{}", name),
TestId::Path(path) => write!(f, "{}", path),
}
}
}
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub enum RunnableKind {
Test { test_id: TestId, attr: TestAttr },
TestMod { path: String },
Bench { test_id: TestId },
DocTest { test_id: TestId },
Bin,
}
#[derive(Debug, Eq, PartialEq)]
pub struct RunnableAction {
pub run_title: &'static str,
pub debugee: bool,
}
const TEST: RunnableAction = RunnableAction { run_title: "▶\u{fe0e} Run Test", debugee: true };
const DOCTEST: RunnableAction =
RunnableAction { run_title: "▶\u{fe0e} Run Doctest", debugee: false };
const BENCH: RunnableAction = RunnableAction { run_title: "▶\u{fe0e} Run Bench", debugee: true };
const BIN: RunnableAction = RunnableAction { run_title: "▶\u{fe0e} Run", debugee: true };
impl Runnable {
// test package::module::testname
pub fn label(&self, target: Option<String>) -> String {
match &self.kind {
RunnableKind::Test { test_id, .. } => format!("test {}", test_id),
RunnableKind::TestMod { path } => format!("test-mod {}", path),
RunnableKind::Bench { test_id } => format!("bench {}", test_id),
RunnableKind::DocTest { test_id, .. } => format!("doctest {}", test_id),
RunnableKind::Bin => {
target.map_or_else(|| "run binary".to_string(), |t| format!("run {}", t))
}
}
}
pub fn action(&self) -> &'static RunnableAction {
match &self.kind {
RunnableKind::Test { .. } | RunnableKind::TestMod { .. } => &TEST,
RunnableKind::DocTest { .. } => &DOCTEST,
RunnableKind::Bench { .. } => &BENCH,
RunnableKind::Bin => &BIN,
}
}
}
// Feature: Run
//
// Shows a popup suggesting to run a test/benchmark/binary **at the current cursor
// location**. Super useful for repeatedly running just a single test. Do bind this
// to a shortcut!
//
// |===
// | Editor | Action Name
//
// | VS Code | **Rust Analyzer: Run**
// |===
// image::https://user-images.githubusercontent.com/48062697/113065583-055aae80-91b1-11eb-958f-d67efcaf6a2f.gif[]
pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> {
let sema = Semantics::new(db);
let mut res = Vec::new();
visit_file_defs(&sema, file_id, &mut |def| match def {
Either::Left(def) => {
let runnable = match def {
hir::ModuleDef::Module(it) => runnable_mod(&sema, it),
hir::ModuleDef::Function(it) => runnable_fn(&sema, it),
_ => None,
};
res.extend(runnable.or_else(|| module_def_doctest(&sema, def)))
}
Either::Right(impl_) => {
res.extend(impl_.items(db).into_iter().filter_map(|assoc| match assoc {
hir::AssocItem::Function(it) => {
runnable_fn(&sema, it).or_else(|| module_def_doctest(&sema, it.into()))
}
hir::AssocItem::Const(it) => module_def_doctest(&sema, it.into()),
hir::AssocItem::TypeAlias(it) => module_def_doctest(&sema, it.into()),
}))
}
});
res
}
// Feature: Related Tests
//
// Provides a sneak peek of all tests where the current item is used.
//
// The simplest way to use this feature is via the context menu:
// - Right-click on the selected item. The context menu opens.
// - Select **Peek related tests**
//
// |===
// | Editor | Action Name
//
// | VS Code | **Rust Analyzer: Peek related tests**
// |===
pub(crate) fn related_tests(
db: &RootDatabase,
position: FilePosition,
search_scope: Option<SearchScope>,
) -> Vec<Runnable> {
let sema = Semantics::new(db);
let mut res: FxHashSet<Runnable> = FxHashSet::default();
find_related_tests(&sema, position, search_scope, &mut res);
res.into_iter().collect_vec()
}
fn find_related_tests(
sema: &Semantics<RootDatabase>,
position: FilePosition,
search_scope: Option<SearchScope>,
tests: &mut FxHashSet<Runnable>,
) {
if let Some(refs) = references::find_all_refs(&sema, position, search_scope) {
for (file_id, refs) in refs.references {
let file = sema.parse(file_id);
let file = file.syntax();
let functions = refs.iter().filter_map(|(range, _)| {
let token = file.token_at_offset(range.start()).next()?;
let token = sema.descend_into_macros(token);
token.ancestors().find_map(ast::Fn::cast)
});
for fn_def in functions {
if let Some(runnable) = as_test_runnable(&sema, &fn_def) {
// direct test
tests.insert(runnable);
} else if let Some(module) = parent_test_module(&sema, &fn_def) {
// indirect test
find_related_tests_in_module(sema, &fn_def, &module, tests);
}
}
}
}
}
fn find_related_tests_in_module(
sema: &Semantics<RootDatabase>,
fn_def: &ast::Fn,
parent_module: &hir::Module,
tests: &mut FxHashSet<Runnable>,
) {
if let Some(fn_name) = fn_def.name() {
let mod_source = parent_module.definition_source(sema.db);
let range = match mod_source.value {
hir::ModuleSource::Module(m) => m.syntax().text_range(),
hir::ModuleSource::BlockExpr(b) => b.syntax().text_range(),
hir::ModuleSource::SourceFile(f) => f.syntax().text_range(),
};
let file_id = mod_source.file_id.original_file(sema.db);
let mod_scope = SearchScope::file_range(FileRange { file_id, range });
let fn_pos = FilePosition { file_id, offset: fn_name.syntax().text_range().start() };
find_related_tests(sema, fn_pos, Some(mod_scope), tests)
}
}
fn as_test_runnable(sema: &Semantics<RootDatabase>, fn_def: &ast::Fn) -> Option<Runnable> {
if test_related_attribute(&fn_def).is_some() {
let function = sema.to_def(fn_def)?;
runnable_fn(sema, function)
} else {
None
}
}
fn parent_test_module(sema: &Semantics<RootDatabase>, fn_def: &ast::Fn) -> Option<hir::Module> {
fn_def.syntax().ancestors().find_map(|node| {
let module = ast::Module::cast(node)?;
let module = sema.to_def(&module)?;
if has_test_function_or_multiple_test_submodules(sema, &module) {
Some(module)
} else {
None
}
})
}
pub(crate) fn runnable_fn(sema: &Semantics<RootDatabase>, def: hir::Function) -> Option<Runnable> {
let func = def.source(sema.db)?;
let name_string = def.name(sema.db).to_string();
let root = def.krate(sema.db)?.root_module(sema.db);
let kind = if name_string == "main" && def.module(sema.db) == root {
RunnableKind::Bin
} else {
let canonical_path = {
let def: hir::ModuleDef = def.into();
def.canonical_path(sema.db)
};
let test_id = canonical_path.map(TestId::Path).unwrap_or(TestId::Name(name_string));
if test_related_attribute(&func.value).is_some() {
let attr = TestAttr::from_fn(&func.value);
RunnableKind::Test { test_id, attr }
} else if func.value.has_atom_attr("bench") {
RunnableKind::Bench { test_id }
} else {
return None;
}
};
let nav = NavigationTarget::from_named(
sema.db,
func.as_ref().map(|it| it as &dyn ast::NameOwner),
SymbolKind::Function,
);
let cfg = def.attrs(sema.db).cfg();
Some(Runnable { nav, kind, cfg })
}
pub(crate) fn runnable_mod(sema: &Semantics<RootDatabase>, def: hir::Module) -> Option<Runnable> {
if !has_test_function_or_multiple_test_submodules(sema, &def) {
return None;
}
let path =
def.path_to_root(sema.db).into_iter().rev().filter_map(|it| it.name(sema.db)).join("::");
let attrs = def.attrs(sema.db);
let cfg = attrs.cfg();
let nav = def.to_nav(sema.db);
Some(Runnable { nav, kind: RunnableKind::TestMod { path }, cfg })
}
fn module_def_doctest(sema: &Semantics<RootDatabase>, def: hir::ModuleDef) -> Option<Runnable> {
let attrs = match def {
hir::ModuleDef::Module(it) => it.attrs(sema.db),
hir::ModuleDef::Function(it) => it.attrs(sema.db),
hir::ModuleDef::Adt(it) => it.attrs(sema.db),
hir::ModuleDef::Variant(it) => it.attrs(sema.db),
hir::ModuleDef::Const(it) => it.attrs(sema.db),
hir::ModuleDef::Static(it) => it.attrs(sema.db),
hir::ModuleDef::Trait(it) => it.attrs(sema.db),
hir::ModuleDef::TypeAlias(it) => it.attrs(sema.db),
hir::ModuleDef::BuiltinType(_) => return None,
};
if !has_runnable_doc_test(&attrs) {
return None;
}
let def_name = def.name(sema.db).map(|it| it.to_string());
let test_id = def
.canonical_path(sema.db)
// This probably belongs to canonical path?
.map(|path| {
let assoc_def = match def {
hir::ModuleDef::Function(it) => it.as_assoc_item(sema.db),
hir::ModuleDef::Const(it) => it.as_assoc_item(sema.db),
hir::ModuleDef::TypeAlias(it) => it.as_assoc_item(sema.db),
_ => None,
};
// FIXME: this also looks very wrong
if let Some(assoc_def) = assoc_def {
if let hir::AssocItemContainer::Impl(imp) = assoc_def.container(sema.db) {
let ty = imp.self_ty(sema.db);
if let Some(adt) = ty.as_adt() {
let name = adt.name(sema.db);
let idx = path.rfind(':').map_or(0, |idx| idx + 1);
let (prefix, suffix) = path.split_at(idx);
let mut ty_args = ty.type_arguments().peekable();
let params = if ty_args.peek().is_some() {
format!(
"<{}>",
ty_args.format_with(", ", |ty, cb| cb(&ty.display(sema.db)))
)
} else {
String::new()
};
return format!("{}{}{}::{}", prefix, name, params, suffix);
}
}
}
path
})
.map(TestId::Path)
.or_else(|| def_name.clone().map(TestId::Name))?;
let mut nav = def.try_to_nav(sema.db)?;
nav.focus_range = None;
nav.description = None;
nav.docs = None;
nav.kind = None;
let res = Runnable { nav, kind: RunnableKind::DocTest { test_id }, cfg: attrs.cfg() };
Some(res)
}
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub struct TestAttr {
pub ignore: bool,
}
impl TestAttr {
fn from_fn(fn_def: &ast::Fn) -> TestAttr {
let ignore = fn_def
.attrs()
.filter_map(|attr| attr.simple_name())
.any(|attribute_text| attribute_text == "ignore");
TestAttr { ignore }
}
}
const RUSTDOC_FENCE: &str = "```";
const RUSTDOC_CODE_BLOCK_ATTRIBUTES_RUNNABLE: &[&str] =
&["", "rust", "should_panic", "edition2015", "edition2018", "edition2021"];
fn has_runnable_doc_test(attrs: &hir::Attrs) -> bool {
attrs.docs().map_or(false, |doc| {
let mut in_code_block = false;
for line in String::from(doc).lines() {
if let Some(header) = line.strip_prefix(RUSTDOC_FENCE) {
in_code_block = !in_code_block;
if in_code_block
&& header
.split(',')
.all(|sub| RUSTDOC_CODE_BLOCK_ATTRIBUTES_RUNNABLE.contains(&sub.trim()))
{
return true;
}
}
}
false
})
}
// We could create runnables for modules with number_of_test_submodules > 0,
// but that bloats the runnables for no real benefit, since all tests can be run by the submodule already
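// For example, in the `only_modules_with_test_functions_or_more_than_one_test_submodule_have_runners`
// test below, `nested_tests_0` (two test submodules) gets its own runnable, while
// `root_tests` (a single test submodule) and the empty `nested_tests_3`/`nested_tests_4` do not.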
fn has_test_function_or_multiple_test_submodules(
sema: &Semantics<RootDatabase>,
module: &hir::Module,
) -> bool {
let mut number_of_test_submodules = 0;
for item in module.declarations(sema.db) {
match item {
hir::ModuleDef::Function(f) => {
if let Some(it) = f.source(sema.db) {
if test_related_attribute(&it.value).is_some() {
return true;
}
}
}
hir::ModuleDef::Module(submodule) => {
if has_test_function_or_multiple_test_submodules(sema, &submodule) {
number_of_test_submodules += 1;
}
}
_ => (),
}
}
number_of_test_submodules > 1
}
#[cfg(test)]
mod tests {
use expect_test::{expect, Expect};
use crate::fixture;
use super::*;
fn check(
ra_fixture: &str,
// FIXME: fold this into `expect` as well
actions: &[&RunnableAction],
expect: Expect,
) {
let (analysis, position) = fixture::position(ra_fixture);
let runnables = analysis.runnables(position.file_id).unwrap();
expect.assert_debug_eq(&runnables);
assert_eq!(
actions,
runnables.into_iter().map(|it| it.action()).collect::<Vec<_>>().as_slice()
);
}
fn check_tests(ra_fixture: &str, expect: Expect) {
let (analysis, position) = fixture::position(ra_fixture);
let tests = analysis.related_tests(position, None).unwrap();
expect.assert_debug_eq(&tests);
}
#[test]
fn test_runnables() {
check(
r#"
//- /lib.rs
$0
fn main() {}
#[test]
fn test_foo() {}
#[test]
#[ignore]
fn test_foo() {}
#[bench]
fn bench() {}
mod not_a_root {
fn main() {}
}
"#,
&[&BIN, &TEST, &TEST, &BENCH],
expect![[r#"
[
Runnable {
nav: NavigationTarget {
file_id: FileId(
0,
),
full_range: 1..13,
focus_range: 4..8,
name: "main",
kind: Function,
},
kind: Bin,
cfg: None,
},
Runnable {
nav: NavigationTarget {
file_id: FileId(
0,
),
full_range: 15..39,
focus_range: 26..34,
name: "test_foo",
kind: Function,
},
kind: Test {
test_id: Path(
"test_foo",
),
attr: TestAttr {
ignore: false,
},
},
cfg: None,
},
Runnable {
nav: NavigationTarget {
file_id: FileId(
0,
),
full_range: 41..75,
focus_range: 62..70,
name: "test_foo",
kind: Function,
},
kind: Test {
test_id: Path(
"test_foo",
),
attr: TestAttr {
ignore: true,
},
},
cfg: None,
},
Runnable {
nav: NavigationTarget {
file_id: FileId(
0,
),
full_range: 77..99,
focus_range: 89..94,
name: "bench",
kind: Function,
},
kind: Bench {
test_id: Path(
"bench",
),
},
cfg: None,
},
]
"#]],
);
}
#[test]
fn test_runnables_doc_test() {
check(
r#"
//- /lib.rs
$0
fn main() {}
/// ```
/// let x = 5;
/// ```
fn should_have_runnable() {}
/// ```edition2018
/// let x = 5;
/// ```
fn should_have_runnable_1() {}
/// ```
/// let z = 55;
/// ```
///
/// ```ignore
/// let z = 56;
/// ```
fn should_have_runnable_2() {}
/**
```rust
let z = 55;
```
*/
fn should_have_no_runnable_3() {}
/**
```rust
let z = 55;
```
*/
fn should_have_no_runnable_4() {}
/// ```no_run
/// let z = 55;
/// ```
fn should_have_no_runnable() {}
/// ```ignore
/// let z = 55;
/// ```
fn should_have_no_runnable_2() {}
/// ```compile_fail
/// let z = 55;
/// ```
fn should_have_no_runnable_3() {}
/// ```text
/// arbitrary plain text
/// ```
fn should_have_no_runnable_4() {}
/// ```text
/// arbitrary plain text
/// ```
///
/// ```sh
/// $ shell code
/// ```
fn should_have_no_runnable_5() {}
/// ```rust,no_run
/// let z = 55;
/// ```
fn should_have_no_runnable_6() {}
/// ```
/// let x = 5;
/// ```
struct StructWithRunnable(String);
"#,
&[&BIN, &DOCTEST, &DOCTEST, &DOCTEST, &DOCTEST, &DOCTEST, &DOCTEST],
expect![[r#"
[
Runnable {
nav: NavigationTarget {
file_id: FileId(
0,
),
full_range: 1..13,
focus_range: 4..8,
name: "main",
kind: Function,
},
kind: Bin,
cfg: None,
},
Runnable {
nav: NavigationTarget {
file_id: FileId(
0,
),
full_range: 15..74,
name: "should_have_runnable",
},
kind: DocTest {
test_id: Path(
"should_have_runnable",
),
},
cfg: None,
},
Runnable {
nav: NavigationTarget {
file_id: FileId(
0,
),
full_range: 76..148,
name: "should_have_runnable_1",
},
kind: DocTest {
test_id: Path(
"should_have_runnable_1",
),
},
cfg: None,
},
Runnable {
nav: NavigationTarget {
file_id: FileId(
0,
),
full_range: 150..254,
name: "should_have_runnable_2",
},
kind: DocTest {
test_id: Path(
"should_have_runnable_2",
),
},
cfg: None,
},
Runnable {
nav: NavigationTarget {
file_id: FileId(
0,
),
full_range: 256..320,
name: "should_have_no_runnable_3",
},
kind: DocTest {
test_id: Path(
"should_have_no_runnable_3",
),
},
cfg: None,
},
Runnable {
nav: NavigationTarget {
file_id: FileId(
0,
),
full_range: 322..398,
name: "should_have_no_runnable_4",
},
kind: DocTest {
test_id: Path(
"should_have_no_runnable_4",
),
},
cfg: None,
},
Runnable {
nav: NavigationTarget {
file_id: FileId(
0,
),
full_range: 900..965,
name: "StructWithRunnable",
},
kind: DocTest {
test_id: Path(
"StructWithRunnable",
),
},
cfg: None,
},
]
"#]],
);
}
#[test]
fn test_runnables_doc_test_in_impl() {
check(
r#"
//- /lib.rs
$0
fn main() {}
struct Data;
impl Data {
/// ```
/// let x = 5;
/// ```
fn foo() {}
}
"#,
&[&BIN, &DOCTEST],
expect![[r#"
[
Runnable {
nav: NavigationTarget {
file_id: FileId(
0,
),
full_range: 1..13,
focus_range: 4..8,
name: "main",
kind: Function,
},
kind: Bin,
cfg: None,
},
Runnable {
nav: NavigationTarget {
file_id: FileId(
0,
),
full_range: 44..98,
name: "foo",
},
kind: DocTest {
test_id: Path(
"Data::foo",
),
},
cfg: None,
},
]
"#]],
);
}
#[test]
fn test_runnables_module() {
check(
r#"
//- /lib.rs
$0
mod test_mod {
#[test]
fn test_foo1() {}
}
"#,
&[&TEST, &TEST],
expect![[r#"
[
Runnable {
nav: NavigationTarget {
file_id: FileId(
0,
),
full_range: 1..51,
focus_range: 5..13,
name: "test_mod",
kind: Module,
},
kind: TestMod {
path: "test_mod",
},
cfg: None,
},
Runnable {
nav: NavigationTarget {
file_id: FileId(
0,
),
full_range: 20..49,
focus_range: 35..44,
name: "test_foo1",
kind: Function,
},
kind: Test {
test_id: Path(
"test_mod::test_foo1",
),
attr: TestAttr {
ignore: false,
},
},
cfg: None,
},
]
"#]],
);
}
#[test]
fn only_modules_with_test_functions_or_more_than_one_test_submodule_have_runners() {
check(
r#"
//- /lib.rs
$0
mod root_tests {
mod nested_tests_0 {
mod nested_tests_1 {
#[test]
fn nested_test_11() {}
#[test]
fn nested_test_12() {}
}
mod nested_tests_2 {
#[test]
fn nested_test_2() {}
}
mod nested_tests_3 {}
}
mod nested_tests_4 {}
}
"#,
&[&TEST, &TEST, &TEST, &TEST, &TEST, &TEST],
expect![[r#"
[
Runnable {
nav: NavigationTarget {
file_id: FileId(
0,
),
full_range: 22..323,
focus_range: 26..40,
name: "nested_tests_0",
kind: Module,
},
kind: TestMod {
path: "root_tests::nested_tests_0",
},
cfg: None,
},
Runnable {
nav: NavigationTarget {
file_id: FileId(
0,
),
full_range: 51..192,
focus_range: 55..69,
name: "nested_tests_1",
kind: Module,
},
kind: TestMod {
path: "root_tests::nested_tests_0::nested_tests_1",
},
cfg: None,
},
Runnable {
nav: NavigationTarget {
file_id: FileId(
0,
),
full_range: 202..286,
focus_range: 206..220,
name: "nested_tests_2",
kind: Module,
},
kind: TestMod {
path: "root_tests::nested_tests_0::nested_tests_2",
},
cfg: None,
},
Runnable {
nav: NavigationTarget {
file_id: FileId(
0,
),
full_range: 84..126,
focus_range: 107..121,
name: "nested_test_11",
kind: Function,
},
kind: Test {
test_id: Path(
"root_tests::nested_tests_0::nested_tests_1::nested_test_11",
),
attr: TestAttr {
ignore: false,
},
},
cfg: None,
},
Runnable {
nav: NavigationTarget {
file_id: FileId(
0,
),
full_range: 140..182,
focus_range: 163..177,
name: "nested_test_12",
kind: Function,
},
kind: Test {
test_id: Path(
"root_tests::nested_tests_0::nested_tests_1::nested_test_12",
),
attr: TestAttr {
ignore: false,
},
},
cfg: None,
},
Runnable {
nav: NavigationTarget {
file_id: FileId(
0,
),
full_range: 235..276,
focus_range: 258..271,
name: "nested_test_2",
kind: Function,
},
kind: Test {
test_id: Path(
"root_tests::nested_tests_0::nested_tests_2::nested_test_2",
),
attr: TestAttr {
ignore: false,
},
},
cfg: None,
},
]
"#]],
);
}
#[test]
fn test_runnables_with_feature() {
check(
r#"
//- /lib.rs crate:foo cfg:feature=foo
$0
#[test]
#[cfg(feature = "foo")]
fn test_foo1() {}
"#,
&[&TEST],
expect![[r#"
[
Runnable {
nav: NavigationTarget {
file_id: FileId(
0,
),
full_range: 1..50,
focus_range: 36..45,
name: "test_foo1",
kind: Function,
},
kind: Test {
test_id: Path(
"test_foo1",
),
attr: TestAttr {
ignore: false,
},
},
cfg: Some(
Atom(
KeyValue {
key: "feature",
value: "foo",
},
),
),
},
]
"#]],
);
}
#[test]
fn test_runnables_with_features() {
check(
r#"
//- /lib.rs crate:foo cfg:feature=foo,feature=bar
$0
#[test]
#[cfg(all(feature = "foo", feature = "bar"))]
fn test_foo1() {}
"#,
&[&TEST],
expect![[r#"
[
Runnable {
nav: NavigationTarget {
file_id: FileId(
0,
),
full_range: 1..72,
focus_range: 58..67,
name: "test_foo1",
kind: Function,
},
kind: Test {
test_id: Path(
"test_foo1",
),
attr: TestAttr {
ignore: false,
},
},
cfg: Some(
All(
[
Atom(
KeyValue {
key: "feature",
value: "foo",
},
),
Atom(
KeyValue {
key: "feature",
value: "bar",
},
),
],
),
),
},
]
"#]],
);
}
#[test]
fn test_runnables_no_test_function_in_module() {
check(
r#"
//- /lib.rs
$0
mod test_mod {
fn foo1() {}
}
"#,
&[],
expect![[r#"
[]
"#]],
);
}
#[test]
fn test_doc_runnables_impl_mod() {
check(
r#"
//- /lib.rs
mod foo;
//- /foo.rs
struct Foo;$0
impl Foo {
/// ```
/// let x = 5;
/// ```
fn foo() {}
}
"#,
&[&DOCTEST],
expect![[r#"
[
Runnable {
nav: NavigationTarget {
file_id: FileId(
1,
),
full_range: 27..81,
name: "foo",
},
kind: DocTest {
test_id: Path(
"foo::Foo::foo",
),
},
cfg: None,
},
]
"#]],
);
}
#[test]
fn test_runnables_in_macro() {
check(
r#"
//- /lib.rs
$0
macro_rules! gen {
() => {
#[test]
fn foo_test() {
}
}
}
mod tests {
gen!();
}
"#,
&[&TEST, &TEST],
expect![[r#"
[
Runnable {
nav: NavigationTarget {
file_id: FileId(
0,
),
full_range: 90..115,
focus_range: 94..99,
name: "tests",
kind: Module,
},
kind: TestMod {
path: "tests",
},
cfg: None,
},
Runnable {
nav: NavigationTarget {
file_id: FileId(
0,
),
full_range: 106..113,
focus_range: 106..113,
name: "foo_test",
kind: Function,
},
kind: Test {
test_id: Path(
"tests::foo_test",
),
attr: TestAttr {
ignore: false,
},
},
cfg: None,
},
]
"#]],
);
}
#[test]
fn dont_recurse_in_outline_submodules() {
check(
r#"
//- /lib.rs
$0
mod m;
//- /m.rs
mod tests {
#[test]
fn t() {}
}
"#,
&[],
expect![[r#"
[]
"#]],
);
}
#[test]
fn find_no_tests() {
check_tests(
r#"
//- /lib.rs
fn foo$0() { };
"#,
expect![[r#"
[]
"#]],
);
}
#[test]
fn find_direct_fn_test() {
check_tests(
r#"
//- /lib.rs
fn foo$0() { };
mod tests {
#[test]
fn foo_test() {
super::foo()
}
}
"#,
expect![[r#"
[
Runnable {
nav: NavigationTarget {
file_id: FileId(
0,
),
full_range: 31..85,
focus_range: 46..54,
name: "foo_test",
kind: Function,
},
kind: Test {
test_id: Path(
"tests::foo_test",
),
attr: TestAttr {
ignore: false,
},
},
cfg: None,
},
]
"#]],
);
}
#[test]
fn find_direct_struct_test() {
check_tests(
r#"
//- /lib.rs
struct Fo$0o;
fn foo(arg: &Foo) { };
mod tests {
use super::*;
#[test]
fn foo_test() {
foo(Foo);
}
}
"#,
expect![[r#"
[
Runnable {
nav: NavigationTarget {
file_id: FileId(
0,
),
full_range: 71..122,
focus_range: 86..94,
name: "foo_test",
kind: Function,
},
kind: Test {
test_id: Path(
"tests::foo_test",
),
attr: TestAttr {
ignore: false,
},
},
cfg: None,
},
]
"#]],
);
}
#[test]
fn find_indirect_fn_test() {
check_tests(
r#"
//- /lib.rs
fn foo$0() { };
mod tests {
use super::foo;
fn check1() {
check2()
}
fn check2() {
foo()
}
#[test]
fn foo_test() {
check1()
}
}
"#,
expect![[r#"
[
Runnable {
nav: NavigationTarget {
file_id: FileId(
0,
),
full_range: 133..183,
focus_range: 148..156,
name: "foo_test",
kind: Function,
},
kind: Test {
test_id: Path(
"tests::foo_test",
),
attr: TestAttr {
ignore: false,
},
},
cfg: None,
},
]
"#]],
);
}
#[test]
fn tests_are_unique() {
check_tests(
r#"
//- /lib.rs
fn foo$0() { };
mod tests {
use super::foo;
#[test]
fn foo_test() {
foo();
foo();
}
#[test]
fn foo2_test() {
foo();
foo();
}
}
"#,
expect![[r#"
[
Runnable {
nav: NavigationTarget {
file_id: FileId(
0,
),
full_range: 52..115,
focus_range: 67..75,
name: "foo_test",
kind: Function,
},
kind: Test {
test_id: Path(
"tests::foo_test",
),
attr: TestAttr {
ignore: false,
},
},
cfg: None,
},
Runnable {
nav: NavigationTarget {
file_id: FileId(
0,
),
full_range: 121..185,
focus_range: 136..145,
name: "foo2_test",
kind: Function,
},
kind: Test {
test_id: Path(
"tests::foo2_test",
),
attr: TestAttr {
ignore: false,
},
},
cfg: None,
},
]
"#]],
);
}
#[test]
fn doc_test_type_params() {
check(
r#"
//- /lib.rs
$0
struct Foo<T, U>;
impl<T, U> Foo<T, U> {
/// ```rust
/// ````
fn t() {}
}
"#,
&[&DOCTEST],
expect![[r#"
[
Runnable {
nav: NavigationTarget {
file_id: FileId(
0,
),
full_range: 47..85,
name: "t",
},
kind: DocTest {
test_id: Path(
"Foo<T, U>::t",
),
},
cfg: None,
},
]
"#]],
);
}
}
fishnet_generator.py | #--------------------------------
# Name: fishnet_generator.py
# Purpose: GSFLOW fishnet generator
# Notes: ArcGIS 10.2+ Version
# Python: 2.7
#--------------------------------
import argparse
import ConfigParser
import datetime as dt
from decimal import Decimal
import logging
import os
import sys
import arcpy
from arcpy import env
import support_functions as support
def fishnet_func(config_path, overwrite_flag=False):
"""GSFLOW Fishnet Generator
Args:
config_file (str): Project config file path
ovewrite_flag (bool): if True, overwrite existing files
debug_flag (bool): if True, enable debug level logging
Parameters
----------
config_path : str
Project configuration file (.ini) path.
ovewrite_flag : bool
If True, overwrite existing files (the default is False).
Returns
-------
None
"""
# Initialize hru parameters class
hru = support.HRUParameters(config_path)
# Open input parameter config file
inputs_cfg = ConfigParser.ConfigParser()
try:
inputs_cfg.readfp(open(config_path))
except Exception as e:
logging.error(
'\nERROR: Config file could not be read, '
'is not an input file, or does not exist\n'
' config_file = {}\n'
' Exception: {}\n'.format(config_path, e))
sys.exit()
# Log DEBUG to file
log_file_name = 'fishnet_generator_log.txt'
log_console = logging.FileHandler(
filename=os.path.join(hru.log_ws, log_file_name), mode='w')
log_console.setLevel(logging.DEBUG)
log_console.setFormatter(logging.Formatter('%(message)s'))
logging.getLogger('').addHandler(log_console)
logging.info('\nGSFLOW Fishnet Generator')
# Warn the user if the fishnet already exists
# It might be better to not allow the user to do this at all and force them
# to manually remove the file.
if arcpy.Exists(hru.polygon_path) and not overwrite_flag:
logging.warning('\nWARNING: The existing fishnet/grid will be '
'overwritten\n {}'.format(hru.polygon_path))
raw_input('Press ENTER to continue')
# Check input paths
study_area_path = inputs_cfg.get('INPUTS', 'study_area_path')
if not arcpy.Exists(study_area_path):
logging.error(
'\nERROR: Study area ({}) does not exist'.format(
study_area_path))
sys.exit()
# For now, study area has to be a polygon
if arcpy.Describe(study_area_path).datasetType != 'FeatureClass':
logging.error(
'\nERROR: For now, study area must be a polygon shapefile')
sys.exit()
# Read Fishnet specific parameters from INI
# If ref_x and ref_y are not specified, get from the study area extent
try:
hru.ref_x = inputs_cfg.getfloat('INPUTS', 'hru_ref_x')
except:
hru.ref_x = arcpy.Describe(study_area_path).extent.XMin
logging.info(
' {0} parameter not set in INI, setting {0} = {1}'.format(
'ref_x', hru.ref_x))
try:
hru.ref_y = inputs_cfg.getfloat('INPUTS', 'hru_ref_y')
except:
hru.ref_y = arcpy.Describe(study_area_path).extent.YMin
logging.info(
' {0} parameter not set in INI, setting {0} = {1}'.format(
'ref_y', hru.ref_y))
try:
buffer_cells = inputs_cfg.getint('INPUTS', 'hru_buffer_cells')
except:
buffer_cells = 2
logging.info(
' Missing INI parameter, setting {} = {}'.format(
'buffer_cells', buffer_cells))
try:
snap_method = inputs_cfg.get('INPUTS', 'hru_param_snap_method')
except:
snap_method = 'EXPAND'
logging.info(
' Missing INI parameter, setting {} = {}'.format(
'snap_method', snap_method))
snap_method_list = ['EXPAND', 'ROUND', 'SHRINK']
if snap_method not in snap_method_list:
logging.error('\nERROR: {} must be: {}'.format(
'snap_method', ', '.join(snap_method_list)))
sys.exit()
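    # Note: EXPAND is assumed to grow the snapped extent outward to the grid,
    # SHRINK to contract it inward, and ROUND to move each edge to the nearest
    # grid line (behavior inferred from the names; see adjust_extent_to_snap
    # in support_functions).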
# Log input hru parameters
logging.info('\nFishnet Parameters')
logging.info(' Cellsize: {}'.format(hru.cs))
logging.info(' Snap point: {} {}'.format(hru.ref_x, hru.ref_y))
logging.debug(' Buffer cells: {}'.format(buffer_cells))
# Read reference point as string for determining number of digits
try:
digits = abs(min(
Decimal(inputs_cfg.get('INPUTS', 'hru_ref_x')).as_tuple().exponent,
Decimal(inputs_cfg.get('INPUTS', 'hru_ref_y')).as_tuple().exponent))
except ConfigParser.NoOptionError:
digits = 10
logging.debug(' Extent digits: {}'.format(digits))
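    # Example: hru_ref_x = '1234.56' has a Decimal exponent of -2, so digits = 2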
# Check inputs
if buffer_cells < 0:
logging.error('\nERROR: Buffer cells must be greater than or equal to 0')
sys.exit()
# Build output folder if necessary
fishnet_temp_ws = os.path.join(hru.param_ws, 'fishnet_temp')
if not os.path.isdir(fishnet_temp_ws):
os.mkdir(fishnet_temp_ws)
# Output paths
study_area_proj_path = os.path.join(
fishnet_temp_ws, 'projected_study_area.shp')
# Set ArcGIS environment variables
arcpy.CheckOutExtension('Spatial')
env.overwriteOutput = True
env.pyramid = 'PYRAMIDS -1'
# env.pyramid = 'PYRAMIDS 0'
env.workspace = hru.param_ws
env.scratchWorkspace = hru.scratch_ws
# Get spatial reference of study_area
hru.sr = arcpy.Describe(study_area_path).spatialReference
# If study area spat_ref doesn't match hru_param spat_ref
# Project study area to hru_param and get projected extent
# Otherwise, read study_area extent directly
study_area_extent = arcpy.Describe(study_area_path).extent
logging.debug('\n Study area: {}'.format(study_area_path))
logging.debug(' Study area spat. ref.: {}'.format(hru.sr.name))
logging.debug(' Study area GCS: {}'.format(hru.sr.GCS.name))
logging.info(' Study Area extent: {}'.format(
support.extent_string(study_area_extent)))
# Check if the study area shapefile is projected
if (hru.sr.name in ['GCS_North_American_1983', 'GCS_WGS_1984'] or
hru.sr.GCS.name == hru.sr.name):
logging.warning(
'\nWARNING: The study area shapefile does not appear to be projected.'
'\n This will likely cause problems or not work at all.'
'\n Projection: {}'.format(hru.sr.name))
raw_input('Press ENTER to continue\n')
# Buffer extent
buffer_extent = support.buffer_extent_func(
study_area_extent, buffer_cells * hru.cs)
logging.info(' Buffered Extent: {}'.format(
support.extent_string(buffer_extent)))
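    # Example: with the default buffer_cells = 2 and, say, a 90 m cellsize,
    # the extent is presumably padded by 180 m on each side (see
    # buffer_extent_func in support_functions).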
# Adjust study area extent to reference points
# Set the number of digits of rounding based on the number of digits
# in the reference points
hru.ref_pnt = arcpy.Point(hru.ref_x, hru.ref_y)
hru.extent = support.adjust_extent_to_snap(
buffer_extent, hru.ref_pnt, hru.cs,
method=snap_method, digits=digits)
logging.info(' Snapped Extent: {}'.format(
support.extent_string(hru.extent)))
# Build hru_param
logging.info('\nBuilding HRU parameter fishnet')
build_fishnet_func(
hru.polygon_path, hru.point_path, hru.extent, hru.cs, hru.sr)
# Write initial parameters to hru_param (X/Y, ROW/COL, Unique ID)
# set_hru_id_func(hru.polygon_path, hru.extent, hru.cs)
def build_fishnet_func(hru_polygon_path, hru_point_path, extent, cs, sr):
    """Build the fishnet polygon and label point feature classes"""
    # Remove existing
    if arcpy.Exists(hru_polygon_path):
        arcpy.Delete_management(hru_polygon_path)
    if arcpy.Exists(hru_point_path):
        arcpy.Delete_management(hru_point_path)
    # Calculate LL/UR corner points
    origin_pnt = (extent.XMin, extent.YMin)
    yaxis_pnt = (extent.XMin, extent.YMin + cs)
    corner_pnt = (extent.XMax, extent.YMax)
    origin_str = ' '.join(map(str, origin_pnt))
    yaxis_str = ' '.join(map(str, yaxis_pnt))
    corner_str = ' '.join(map(str, corner_pnt))
    logging.debug('  Origin: {}'.format(origin_str))
    logging.debug('  Y-Axis: {}'.format(yaxis_str))
    logging.debug('  Corner: {}'.format(corner_str))
    # Build fishnet & labels
    arcpy.CreateFishnet_management(
        hru_polygon_path, origin_str, yaxis_str, cs, cs,
        '0', '0', corner_str, 'LABELS', '#', 'POLYGON')
    arcpy.DefineProjection_management(hru_polygon_path, sr)
    arcpy.DefineProjection_management(hru_point_path, sr)
def arg_parse():
""""""
parser = argparse.ArgumentParser(
description='Fishnet Generator',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument(
'-i', '--ini', required=True,
help='Project input file', metavar='PATH')
parser.add_argument(
'-o', '--overwrite', default=False, action="store_true",
help='Force overwrite of existing files')
parser.add_argument(
'-d', '--debug', default=logging.INFO, const=logging.DEBUG,
help='Debug level logging', action="store_const", dest="loglevel")
args = parser.parse_args()
# Convert input file to an absolute path
if os.path.isfile(os.path.abspath(args.ini)):
args.ini = os.path.abspath(args.ini)
return args
if __name__ == '__main__':
args = arg_parse()
logging.basicConfig(level=args.loglevel, format='%(message)s')
logging.info('\n{}'.format('#' * 80))
log_f = '{:<20s} {}'
logging.info(log_f.format(
'Run Time Stamp:', dt.datetime.now().isoformat(' ')))
logging.info(log_f.format('Current Directory:', os.getcwd()))
logging.info(log_f.format('Script:', os.path.basename(sys.argv[0])))
fishnet_func(config_path=args.ini, overwrite_flag=args.overwrite)
mod.rs | // Copyright 2017 Peter Williams <[email protected]> <[email protected]>
use std::cmp::Ordering;
use std::convert::From;
use std::fmt;
use std::fmt::Debug;
use std::hash::*;
use std::rc::Rc;
use std::str::FromStr;
use regex::*;
use pw_gix::{
gdk,
glib::{self, StaticType},
gtk::{self, prelude::*},
wrapper::*,
};
use crate::colour::*;
use crate::error::*;
pub mod display;
pub mod entry;
pub mod factory;
pub mod hue_wheel;
pub trait CharacteristicsInterface: Debug + Hash + PartialEq + Clone + Copy + ToString {
type Entry: CharacteristicsEntryInterface<Self>;
fn tv_row_len() -> usize;
fn tv_columns(start_col_id: i32) -> Vec<gtk::TreeViewColumn>;
fn from_floats(floats: &Vec<f64>) -> Self;
fn from_str(string: &str) -> Result<Self, PaintError<Self>>;
fn tv_rows(&self) -> Vec<glib::Value>;
fn gui_display_widget(&self) -> gtk::Box;
fn to_floats(&self) -> Vec<f64>;
}
pub trait CharacteristicsEntryInterface<C: CharacteristicsInterface> {
fn create() -> Rc<Self>;
fn pwo(&self) -> gtk::Grid;
fn get_characteristics(&self) -> Option<C>;
fn set_characteristics(&self, o_characteristics: Option<&C>);
fn connect_changed<F: 'static + Fn()>(&self, callback: F);
}
pub trait ColourAttributesInterface: WidgetWrapper {
fn create() -> Rc<Self>;
fn tv_columns() -> Vec<gtk::TreeViewColumn>;
fn scalar_attributes() -> Vec<ScalarAttribute>;
fn set_colour(&self, colour: Option<&Colour>);
fn set_target_colour(&self, target_colour: Option<&Colour>);
}
pub trait ColouredItemInterface {
fn colour(&self) -> Colour;
fn rgb(&self) -> RGB {
self.colour().rgb()
}
fn hue(&self) -> Option<Hue> {
self.colour().hue()
}
fn is_grey(&self) -> bool {
self.colour().is_grey()
}
fn chroma(&self) -> f64 {
self.colour().chroma()
}
fn greyness(&self) -> f64 {
self.colour().greyness()
}
fn value(&self) -> f64 {
self.colour().value()
}
fn warmth(&self) -> f64 {
self.colour().warmth()
}
fn monochrome_rgb(&self) -> RGB {
self.colour().monochrome_rgb()
}
fn best_foreground_rgb(&self) -> RGB {
self.colour().best_foreground_rgb()
}
fn max_chroma_rgb(&self) -> RGB {
self.colour().max_chroma_rgb()
}
fn warmth_rgb(&self) -> RGB {
self.colour().warmth_rgb()
}
fn scalar_attribute(&self, attr: ScalarAttribute) -> f64 {
self.colour().scalar_attribute(attr)
}
}
pub trait BasicPaintInterface<C>: Clone + PartialEq + Ord + Debug + ColouredItemInterface
where
C: CharacteristicsInterface,
{
fn name(&self) -> String;
fn notes(&self) -> String;
fn tooltip_text(&self) -> String;
fn characteristics(&self) -> C;
fn get_spec(&self) -> BasicPaintSpec<C> {
BasicPaintSpec::<C> {
rgb: self.rgb(),
name: self.name(),
notes: self.notes(),
characteristics: self.characteristics(),
}
}
    fn matches_spec(&self, spec: &BasicPaintSpec<C>) -> bool {
        self.rgb() == spec.rgb
            && self.name() == spec.name
            && self.notes() == spec.notes
            && self.characteristics() == spec.characteristics
    }
fn tv_row_len() -> usize {
14 + C::tv_row_len()
}
fn tv_rows(&self) -> Vec<glib::Value> {
let rgba: gdk::RGBA = self.rgb().into_gdk_rgba();
let frgba: gdk::RGBA = self.rgb().best_foreground_rgb().into_gdk_rgba();
let mrgba: gdk::RGBA = self.monochrome_rgb().into_gdk_rgba();
let mfrgba: gdk::RGBA = self.monochrome_rgb().best_foreground_rgb().into_gdk_rgba();
let wrgba: gdk::RGBA = self.warmth_rgb().into_gdk_rgba();
let wfrgba: gdk::RGBA = self.warmth_rgb().best_foreground_rgb().into_gdk_rgba();
let hrgba: gdk::RGBA = self.max_chroma_rgb().into_gdk_rgba();
let angle = if let Some(hue) = self.hue() {
hue.angle().radians()
} else {
0.0
};
let mut rows = vec![
self.name().to_value(),
self.notes().to_value(),
format!("{:5.4}", self.chroma()).to_value(),
format!("{:5.4}", self.greyness()).to_value(),
format!("{:5.4}", self.value()).to_value(),
format!("{:5.4}", self.warmth()).to_value(),
rgba.to_value(),
frgba.to_value(),
mrgba.to_value(),
mfrgba.to_value(),
wrgba.to_value(),
wfrgba.to_value(),
hrgba.to_value(),
angle.to_value(),
];
for row in self.characteristics().tv_rows().iter() {
rows.push(row.clone());
}
rows
}
}
#[derive(Debug, PartialEq, Clone)]
pub struct BasicPaintSpec<C: CharacteristicsInterface> {
pub rgb: RGB,
pub name: String,
pub notes: String,
pub characteristics: C,
}
impl<C: CharacteristicsInterface> From<BasicPaint<C>> for BasicPaintSpec<C> {
fn from(paint: BasicPaint<C>) -> BasicPaintSpec<C> {
BasicPaintSpec::<C> {
rgb: paint.rgb(),
name: paint.name(),
notes: paint.notes(),
characteristics: paint.characteristics(),
}
}
}
#[derive(Debug, Clone)]
pub struct BasicPaintCore<C: CharacteristicsInterface> {
colour: Colour,
name: String,
notes: String,
characteristics: C,
}
pub type BasicPaint<C> = Rc<BasicPaintCore<C>>;
pub trait FromSpec<C: CharacteristicsInterface> {
fn from_spec(spec: &BasicPaintSpec<C>) -> Self;
}
impl<C: CharacteristicsInterface> FromSpec<C> for BasicPaint<C> {
fn from_spec(spec: &BasicPaintSpec<C>) -> BasicPaint<C> {
Rc::new(BasicPaintCore::<C> {
colour: Colour::from(spec.rgb),
name: spec.name.clone(),
notes: spec.notes.clone(),
characteristics: spec.characteristics,
})
}
}
impl<C: CharacteristicsInterface> ColouredItemInterface for BasicPaint<C> {
fn colour(&self) -> Colour {
self.colour.clone()
}
}
impl<C: CharacteristicsInterface> PartialEq for BasicPaintCore<C> {
fn eq(&self, other: &BasicPaintCore<C>) -> bool {
self.name == other.name
}
}
impl<C: CharacteristicsInterface> Eq for BasicPaintCore<C> {}
impl<C: CharacteristicsInterface> PartialOrd for BasicPaintCore<C> {
fn partial_cmp(&self, other: &BasicPaintCore<C>) -> Option<Ordering> {
self.name.partial_cmp(&other.name)
}
}
impl<C: CharacteristicsInterface> Ord for BasicPaintCore<C> {
fn cmp(&self, other: &BasicPaintCore<C>) -> Ordering {
self.name.cmp(&other.name)
}
}
impl<C> BasicPaintInterface<C> for BasicPaint<C>
where
C: CharacteristicsInterface,
{
fn name(&self) -> String {
self.name.clone()
}
fn notes(&self) -> String {
self.notes.clone()
}
    fn tooltip_text(&self) -> String {
        if !self.notes.is_empty() {
            format!("{}\n{}", self.name, self.notes)
        } else {
            self.name.clone()
        }
    }
fn characteristics(&self) -> C {
self.characteristics.clone()
}
}
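// An example of a serialized paint that BASIC_PAINT_RE matches (taken from the
// tests at the bottom of this file):
// ModelPaint(name="71.001 White", rgb=RGB16(red=0xF800, green=0xFA00, blue=0xF600), transparency="O", finish="F", metallic="NM", fluorescence="NF", notes="FS37925 RAL9016 RLM21")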
lazy_static! {
pub static ref BASIC_PAINT_RE: Regex = Regex::new(
r#"^(?P<ptype>\w+)\((name=)?"(?P<name>.+)", rgb=(?P<rgb>RGB(16)?\([^)]+\))(?P<characteristics>(?:, \w+="\w+")*)(, notes="(?P<notes>.*)")?\)$"#
).unwrap();
}
impl<C: CharacteristicsInterface> FromStr for BasicPaintSpec<C> {
type Err = PaintError<C>;
fn from_str(string: &str) -> Result<BasicPaintSpec<C>, PaintError<C>> {
let captures = BASIC_PAINT_RE.captures(string).ok_or(PaintError::from(
PaintErrorType::MalformedText(string.to_string()),
))?;
let c_match = captures.name("characteristics").ok_or(PaintError::from(
PaintErrorType::MalformedText(string.to_string()),
))?;
let rgb_match =
captures
.name("rgb")
.ok_or(PaintError::from(PaintErrorType::MalformedText(
string.to_string(),
)))?;
let name_match =
captures
.name("name")
.ok_or(PaintError::from(PaintErrorType::MalformedText(
string.to_string(),
)))?;
let characteristics = C::from_str(c_match.as_str())?;
let rgb16 = RGB16::from_str(rgb_match.as_str())?;
let notes = match captures.name("notes") {
Some(notes_match) => notes_match.as_str().to_string(),
None => "".to_string(),
};
Ok(BasicPaintSpec::<C> {
rgb: RGB::from(rgb16),
name: name_match.as_str().to_string().replace("\\\"", "\""),
notes: notes.replace("\\\"", "\""),
characteristics: characteristics,
})
}
}
impl<C: CharacteristicsInterface> fmt::Display for BasicPaintSpec<C> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(
f,
"PaintSpec(name=\"{}\", rgb={}, {}, notes=\"{}\")",
self.name.replace("\"", "\\\""),
RGB16::from(self.rgb).to_string(),
self.characteristics.to_string(),
self.notes.replace("\"", "\\\"")
)
}
}
pub const SP_NAME: i32 = 0;
pub const SP_NOTES: i32 = 1;
pub const SP_CHROMA: i32 = 2;
pub const SP_GREYNESS: i32 = 3;
pub const SP_VALUE: i32 = 4;
pub const SP_WARMTH: i32 = 5;
pub const SP_RGB: i32 = 6;
pub const SP_RGB_FG: i32 = 7;
pub const SP_MONO_RGB: i32 = 8;
pub const SP_MONO_RGB_FG: i32 = 9;
pub const SP_WARMTH_RGB: i32 = 10;
pub const SP_WARMTH_RGB_FG: i32 = 11;
pub const SP_HUE_RGB: i32 = 12;
pub const SP_HUE_ANGLE: i32 = 13;
pub const SP_CHARS_0: i32 = 14;
pub const SP_CHARS_1: i32 = 15;
pub const SP_CHARS_2: i32 = 16;
pub const SP_CHARS_3: i32 = 17;
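// These indices match, in order, the values pushed by
// `BasicPaintInterface::tv_rows()` above: name, notes, chroma, greyness,
// value, warmth, seven RGBA colours, the hue angle, and then the
// characteristic columns.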
lazy_static! {
pub static ref STANDARD_PAINT_ROW_SPEC: [glib::Type; 18] =
[
glib::Type::String, // 0 Name
glib::Type::String, // 1 Notes
glib::Type::String, // 2 Chroma
glib::Type::String, // 3 Greyness
glib::Type::String, // 4 Value
glib::Type::String, // 5 Warmth
gdk::RGBA::static_type(), // 6 RGB
gdk::RGBA::static_type(), // 7 FG for RGB
gdk::RGBA::static_type(), // 8 Monochrome RGB
gdk::RGBA::static_type(), // 9 FG for Monochrome RGB
gdk::RGBA::static_type(), // 10 Warmth RGB
        gdk::RGBA::static_type(), // 11 FG for Warmth RGB
        gdk::RGBA::static_type(), // 12 Hue Colour
        f64::static_type(), // 13 Hue angle (radians)
        glib::Type::String, // 14 Characteristic #1
        glib::Type::String, // 15 Characteristic #2
        glib::Type::String, // 16 Characteristic #3
glib::Type::String, // 17 Characteristic #4
];
}
pub trait PaintTreeViewColumnSpec {
fn tv_columns() -> Vec<gtk::TreeViewColumn>;
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn basic_paint_basic_paint_regex() {
let test_str = r#"ModelPaint(name="71.001 White", rgb=RGB16(red=0xF800, green=0xFA00, blue=0xF600), transparency="O", finish="F", metallic="NM", fluorescence="NF", notes="FS37925 RAL9016 RLM21")"#.to_string();
assert!(BASIC_PAINT_RE.is_match(&test_str));
let captures = BASIC_PAINT_RE.captures(&test_str).unwrap();
assert_eq!(captures.name("ptype").unwrap().as_str(), "ModelPaint");
assert_eq!(
captures.name("rgb").unwrap().as_str(),
"RGB16(red=0xF800, green=0xFA00, blue=0xF600)"
);
assert_eq!(
captures.name("characteristics").unwrap().as_str(),
", transparency=\"O\", finish=\"F\", metallic=\"NM\", fluorescence=\"NF\""
);
assert_eq!(
captures.name("notes").unwrap().as_str(),
"FS37925 RAL9016 RLM21"
);
}
#[test]
fn basic_paint_basic_paint_obsolete_regex() {
let test_str = r#"NamedColour(name="XF 1: Flat Black *", rgb=RGB(0x2D00, 0x2B00, 0x3000), transparency="O", finish="F")"#.to_string();
assert!(BASIC_PAINT_RE.is_match(&test_str));
let captures = BASIC_PAINT_RE.captures(&test_str).unwrap();
assert_eq!(captures.name("ptype").unwrap().as_str(), "NamedColour");
assert_eq!(
captures.name("rgb").unwrap().as_str(),
"RGB(0x2D00, 0x2B00, 0x3000)"
);
assert_eq!(
captures.name("characteristics").unwrap().as_str(),
", transparency=\"O\", finish=\"F\""
);
assert_eq!(captures.name("notes"), None);
}
}
taggetTemplate1.js | // tagged templates = process the template inside a function
function tag(partes, ...valores) {
console.log(partes)
console.log(valores)
return 'Outra String'
}
const aluno = 'Gui'
const situacao = 'Aprovado'
console.log(tag`${aluno} está ${situacao}.`)
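// Here partes is ['', ' está ', '.'] and valores is ['Gui', 'Aprovado'], so the
// call logs both arrays and then prints the returned 'Outra String'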
mod.rs | //! Method lookup: the secret sauce of Rust. See the [rustc dev guide] for more information.
//!
//! [rustc dev guide]: https://rustc-dev-guide.rust-lang.org/method-lookup.html
mod confirm;
mod prelude2021;
pub mod probe;
mod suggest;
pub use self::suggest::SelfSource;
pub use self::CandidateSource::*;
pub use self::MethodError::*;
use crate::check::FnCtxt;
use crate::ObligationCause;
use rustc_data_structures::sync::Lrc;
use rustc_errors::{Applicability, DiagnosticBuilder};
use rustc_hir as hir;
use rustc_hir::def::{CtorOf, DefKind, Namespace};
use rustc_hir::def_id::DefId;
use rustc_infer::infer::{self, InferOk};
use rustc_middle::ty::subst::Subst;
use rustc_middle::ty::subst::{InternalSubsts, SubstsRef};
use rustc_middle::ty::GenericParamDefKind;
use rustc_middle::ty::{self, ToPredicate, Ty, TypeFoldable};
use rustc_span::symbol::Ident;
use rustc_span::Span;
use rustc_trait_selection::traits;
use rustc_trait_selection::traits::query::evaluate_obligation::InferCtxtExt;
use self::probe::{IsSuggestion, ProbeScope};
pub fn provide(providers: &mut ty::query::Providers) {
probe::provide(providers);
}
#[derive(Clone, Copy, Debug)]
pub struct MethodCallee<'tcx> {
/// Impl method ID, for inherent methods, or trait method ID, otherwise.
pub def_id: DefId,
pub substs: SubstsRef<'tcx>,
/// Instantiated method signature, i.e., it has been
/// substituted, normalized, and has had late-bound
/// lifetimes replaced with inference variables.
pub sig: ty::FnSig<'tcx>,
}
#[derive(Debug)]
pub enum MethodError<'tcx> {
// Did not find an applicable method, but we did find various near-misses that may work.
NoMatch(NoMatchData<'tcx>),
// Multiple methods might apply.
Ambiguity(Vec<CandidateSource>),
// Found an applicable method, but it is not visible. The third argument contains a list of
// not-in-scope traits which may work.
PrivateMatch(DefKind, DefId, Vec<DefId>),
// Found a `Self: Sized` bound where `Self` is a trait object, also the caller may have
// forgotten to import a trait.
IllegalSizedBound(Vec<DefId>, bool, Span),
// Found a match, but the return type is wrong
BadReturnType,
}
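// For example, calling a method that is only provided by a trait that has not
// been imported typically yields `NoMatch` with `out_of_scope_traits`
// populated, which drives the "import the trait" suggestions.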
// Contains a list of static methods that may apply, a list of unsatisfied trait predicates which
// could lead to matches if satisfied, and a list of not-in-scope traits which may work.
#[derive(Debug)]
pub struct NoMatchData<'tcx> {
pub static_candidates: Vec<CandidateSource>,
pub unsatisfied_predicates:
Vec<(ty::Predicate<'tcx>, Option<ty::Predicate<'tcx>>, Option<ObligationCause<'tcx>>)>,
pub out_of_scope_traits: Vec<DefId>,
pub lev_candidate: Option<ty::AssocItem>,
pub mode: probe::Mode,
}
impl<'tcx> NoMatchData<'tcx> {
pub fn new(
static_candidates: Vec<CandidateSource>,
unsatisfied_predicates: Vec<(
ty::Predicate<'tcx>,
Option<ty::Predicate<'tcx>>,
Option<ObligationCause<'tcx>>,
)>,
out_of_scope_traits: Vec<DefId>,
lev_candidate: Option<ty::AssocItem>,
mode: probe::Mode,
) -> Self {
NoMatchData {
static_candidates,
unsatisfied_predicates,
out_of_scope_traits,
lev_candidate,
mode,
}
}
}
// A pared down enum describing just the places from which a method
// candidate can arise. Used for error reporting only.
#[derive(Copy, Clone, Debug, Eq, Ord, PartialEq, PartialOrd)]
pub enum CandidateSource {
ImplSource(DefId),
TraitSource(DefId /* trait id */),
}
impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
/// Determines whether the type `self_ty` supports a method name `method_name` or not.
#[instrument(level = "debug", skip(self))]
pub fn method_exists(
&self,
method_name: Ident,
self_ty: Ty<'tcx>,
call_expr_id: hir::HirId,
allow_private: bool,
) -> bool {
let mode = probe::Mode::MethodCall;
match self.probe_for_name(
method_name.span,
mode,
method_name,
IsSuggestion(false),
self_ty,
call_expr_id,
ProbeScope::TraitsInScope,
) {
Ok(..) => true,
Err(NoMatch(..)) => false,
Err(Ambiguity(..)) => true,
Err(PrivateMatch(..)) => allow_private,
Err(IllegalSizedBound(..)) => true,
Err(BadReturnType) => bug!("no return type expectations but got BadReturnType"),
}
}
/// Adds a suggestion to call the given method to the provided diagnostic.
#[instrument(level = "debug", skip(self, err, call_expr))]
crate fn suggest_method_call(
&self,
err: &mut DiagnosticBuilder<'a>,
msg: &str,
method_name: Ident,
self_ty: Ty<'tcx>,
call_expr: &hir::Expr<'_>,
span: Option<Span>,
) {
let params = self
.probe_for_name(
method_name.span,
probe::Mode::MethodCall,
method_name,
IsSuggestion(false),
self_ty,
call_expr.hir_id,
ProbeScope::TraitsInScope,
)
.map(|pick| {
let sig = self.tcx.fn_sig(pick.item.def_id);
sig.inputs().skip_binder().len().saturating_sub(1)
})
.unwrap_or(0);
// Account for `foo.bar<T>`;
let sugg_span = span.unwrap_or(call_expr.span).shrink_to_hi();
let (suggestion, applicability) = (
format!("({})", (0..params).map(|_| "_").collect::<Vec<_>>().join(", ")),
if params > 0 { Applicability::HasPlaceholders } else { Applicability::MaybeIncorrect },
);
err.span_suggestion_verbose(sugg_span, msg, suggestion, applicability);
}
/// Performs method lookup. If lookup is successful, it will return the callee
/// and store an appropriate adjustment for the self-expr. In some cases it may
/// report an error (e.g., invoking the `drop` method).
///
/// # Arguments
///
/// Given a method call like `foo.bar::<T1,...Tn>(a, b + 1, ...)`:
///
/// * `self`: the surrounding `FnCtxt` (!)
/// * `self_ty`: the (unadjusted) type of the self expression (`foo`)
/// * `segment`: the name and generic arguments of the method (`bar::<T1, ...Tn>`)
/// * `span`: the span for the method call
/// * `call_expr`: the complete method call: (`foo.bar::<T1,...Tn>(...)`)
/// * `self_expr`: the self expression (`foo`)
/// * `args`: the expressions of the arguments (`a, b + 1, ...`)
#[instrument(level = "debug", skip(self, call_expr, self_expr))]
pub fn lookup_method(
&self,
self_ty: Ty<'tcx>,
segment: &hir::PathSegment<'_>,
span: Span,
call_expr: &'tcx hir::Expr<'tcx>,
self_expr: &'tcx hir::Expr<'tcx>,
args: &'tcx [hir::Expr<'tcx>],
) -> Result<MethodCallee<'tcx>, MethodError<'tcx>> {
debug!(
"lookup(method_name={}, self_ty={:?}, call_expr={:?}, self_expr={:?})",
segment.ident, self_ty, call_expr, self_expr
);
let pick =
self.lookup_probe(span, segment.ident, self_ty, call_expr, ProbeScope::TraitsInScope)?;
self.lint_dot_call_from_2018(self_ty, segment, span, call_expr, self_expr, &pick, args);
for import_id in &pick.import_ids {
debug!("used_trait_import: {:?}", import_id);
Lrc::get_mut(&mut self.typeck_results.borrow_mut().used_trait_imports)
.unwrap()
.insert(*import_id);
}
self.tcx.check_stability(pick.item.def_id, Some(call_expr.hir_id), span, None);
let result =
self.confirm_method(span, self_expr, call_expr, self_ty, pick.clone(), segment);
debug!("result = {:?}", result);
if let Some(span) = result.illegal_sized_bound {
let mut needs_mut = false;
if let ty::Ref(region, t_type, mutability) = self_ty.kind() {
let trait_type = self
.tcx
.mk_ref(region, ty::TypeAndMut { ty: t_type, mutbl: mutability.invert() });
// We probe again to see if there might be a borrow mutability discrepancy.
match self.lookup_probe(
span,
segment.ident,
trait_type,
call_expr,
ProbeScope::TraitsInScope,
) {
Ok(ref new_pick) if *new_pick != pick => {
needs_mut = true;
}
_ => {}
}
}
// We probe again, taking all traits into account (not only those in scope).
let candidates = match self.lookup_probe(
span,
segment.ident,
self_ty,
call_expr,
ProbeScope::AllTraits,
) {
// If we find a different result the caller probably forgot to import a trait.
Ok(ref new_pick) if *new_pick != pick => vec![new_pick.item.container.id()],
Err(Ambiguity(ref sources)) => sources
.iter()
.filter_map(|source| {
match *source {
// Note: this cannot come from an inherent impl,
// because the first probing succeeded.
ImplSource(def) => self.tcx.trait_id_of_impl(def),
TraitSource(_) => None,
}
})
.collect(),
_ => Vec::new(),
};
return Err(IllegalSizedBound(candidates, needs_mut, span));
}
Ok(result.callee)
}
#[instrument(level = "debug", skip(self, call_expr))]
pub fn lookup_probe(
&self,
span: Span,
method_name: Ident,
self_ty: Ty<'tcx>,
call_expr: &'tcx hir::Expr<'tcx>,
scope: ProbeScope,
) -> probe::PickResult<'tcx> {
let mode = probe::Mode::MethodCall;
let self_ty = self.resolve_vars_if_possible(self_ty);
self.probe_for_name(
span,
mode,
method_name,
IsSuggestion(false),
self_ty,
call_expr.hir_id,
scope,
)
}
pub(super) fn obligation_for_method(
&self,
span: Span,
trait_def_id: DefId,
self_ty: Ty<'tcx>,
opt_input_types: Option<&[Ty<'tcx>]>,
) -> (traits::Obligation<'tcx, ty::Predicate<'tcx>>, &'tcx ty::List<ty::subst::GenericArg<'tcx>>)
    {
        // Construct a trait-reference `self_ty : Trait<input_tys>`
        let substs = InternalSubsts::for_item(self.tcx, trait_def_id, |param, _| {
            match param.kind {
                GenericParamDefKind::Lifetime | GenericParamDefKind::Const { .. } => {}
                GenericParamDefKind::Type { .. } => {
                    if param.index == 0 {
                        return self_ty.into();
                    } else if let Some(input_types) = opt_input_types {
                        return input_types[param.index as usize - 1].into();
                    }
                }
            }
            self.var_for_def(span, param)
        });
        let trait_ref = ty::TraitRef::new(trait_def_id, substs);
        // Construct an obligation
        let poly_trait_ref = ty::Binder::dummy(trait_ref);
        (
            traits::Obligation::misc(
                span,
                self.body_id,
                self.param_env,
                poly_trait_ref.without_const().to_predicate(self.tcx),
            ),
            substs,
        )
    }
/// `lookup_method_in_trait` is used for overloaded operators.
/// It does a very narrow slice of what the normal probe/confirm path does.
/// In particular, it doesn't really do any probing: it simply constructs
/// an obligation for a particular trait with the given self type and checks
/// whether that trait is implemented.
//
// FIXME(#18741): it seems likely that we can consolidate some of this
// code with the other method-lookup code. In particular, the second half
// of this method is basically the same as confirmation.
#[instrument(level = "debug", skip(self, span, opt_input_types))]
pub(super) fn lookup_method_in_trait(
&self,
span: Span,
m_name: Ident,
trait_def_id: DefId,
self_ty: Ty<'tcx>,
opt_input_types: Option<&[Ty<'tcx>]>,
) -> Option<InferOk<'tcx, MethodCallee<'tcx>>> {
debug!(
"lookup_in_trait_adjusted(self_ty={:?}, m_name={}, trait_def_id={:?}, opt_input_types={:?})",
self_ty, m_name, trait_def_id, opt_input_types
);
let (obligation, substs) =
self.obligation_for_method(span, trait_def_id, self_ty, opt_input_types);
// Now we want to know if this can be matched
if !self.predicate_may_hold(&obligation) {
debug!("--> Cannot match obligation");
// Cannot be matched, no such method resolution is possible.
return None;
}
// Trait must have a method named `m_name` and it should not have
// type parameters or early-bound regions.
let tcx = self.tcx;
let method_item = match self.associated_item(trait_def_id, m_name, Namespace::ValueNS) {
Some(method_item) => method_item,
None => {
tcx.sess.delay_span_bug(
span,
"operator trait does not have corresponding operator method",
);
return None;
}
};
let def_id = method_item.def_id;
let generics = tcx.generics_of(def_id);
assert_eq!(generics.params.len(), 0);
debug!("lookup_in_trait_adjusted: method_item={:?}", method_item);
let mut obligations = vec![];
// Instantiate late-bound regions and substitute the trait
// parameters into the method type to get the actual method type.
//
// N.B., instantiate late-bound regions first so that
// `instantiate_type_scheme` can normalize associated types that
// may reference those regions.
let fn_sig = tcx.fn_sig(def_id);
let fn_sig = self.replace_bound_vars_with_fresh_vars(span, infer::FnCall, fn_sig).0;
let fn_sig = fn_sig.subst(self.tcx, substs);
let InferOk { value, obligations: o } =
self.normalize_associated_types_in_as_infer_ok(span, fn_sig);
let fn_sig = {
obligations.extend(o);
value
};
// Register obligations for the parameters. This will include the
// `Self` parameter, which in turn has a bound of the main trait,
// so this also effectively registers `obligation` as well. (We
// used to register `obligation` explicitly, but that resulted in
// double error messages being reported.)
//
// Note that as the method comes from a trait, it should not have
// any late-bound regions appearing in its bounds.
let bounds = self.tcx.predicates_of(def_id).instantiate(self.tcx, substs);
let InferOk { value, obligations: o } =
self.normalize_associated_types_in_as_infer_ok(span, bounds);
let bounds = {
obligations.extend(o);
value
};
assert!(!bounds.has_escaping_bound_vars());
let cause = traits::ObligationCause::misc(span, self.body_id);
obligations.extend(traits::predicates_for_generics(cause.clone(), self.param_env, bounds));
// Also add an obligation for the method type being well-formed.
let method_ty = tcx.mk_fn_ptr(ty::Binder::dummy(fn_sig));
debug!(
"lookup_in_trait_adjusted: matched method method_ty={:?} obligation={:?}",
method_ty, obligation
);
obligations.push(traits::Obligation::new(
cause,
self.param_env,
ty::Binder::dummy(ty::PredicateKind::WellFormed(method_ty.into())).to_predicate(tcx),
));
let callee = MethodCallee { def_id, substs, sig: fn_sig };
debug!("callee = {:?}", callee);
Some(InferOk { obligations, value: callee })
}
/// Performs a [full-qualified function call] (formerly "universal function call") lookup. If
/// lookup is successful, it will return the type of definition and the [`DefId`] of the found
/// function definition.
///
/// [full-qualified function call]: https://doc.rust-lang.org/reference/expressions/call-expr.html#disambiguating-function-calls
///
/// # Arguments
///
/// Given a function call like `Foo::bar::<T1,...Tn>(...)`:
///
/// * `self`: the surrounding `FnCtxt` (!)
/// * `span`: the span of the call, excluding arguments (`Foo::bar::<T1, ...Tn>`)
/// * `method_name`: the identifier of the function within the container type (`bar`)
/// * `self_ty`: the type to search within (`Foo`)
/// * `self_ty_span` the span for the type being searched within (span of `Foo`)
/// * `expr_id`: the [`hir::HirId`] of the expression composing the entire call
#[instrument(level = "debug", skip(self))]
pub fn resolve_fully_qualified_call(
&self,
span: Span,
method_name: Ident,
self_ty: Ty<'tcx>,
self_ty_span: Span,
expr_id: hir::HirId,
) -> Result<(DefKind, DefId), MethodError<'tcx>> {
debug!(
"resolve_fully_qualified_call: method_name={:?} self_ty={:?} expr_id={:?}",
method_name, self_ty, expr_id,
);
let tcx = self.tcx;
// Check if we have an enum variant.
if let ty::Adt(adt_def, _) = self_ty.kind() {
if adt_def.is_enum() {
let variant_def = adt_def
.variants
.iter()
.find(|vd| tcx.hygienic_eq(method_name, vd.ident, adt_def.did));
if let Some(variant_def) = variant_def {
// Braced variants generate unusable names in value namespace (reserved for
// possible future use), so variants resolved as associated items may refer to
// them as well. It's ok to use the variant's id as a ctor id since an
// error will be reported on any use of such resolution anyway.
let ctor_def_id = variant_def.ctor_def_id.unwrap_or(variant_def.def_id);
tcx.check_stability(ctor_def_id, Some(expr_id), span, Some(method_name.span));
return Ok((
DefKind::Ctor(CtorOf::Variant, variant_def.ctor_kind),
ctor_def_id,
));
}
}
}
let pick = self.probe_for_name(
span,
probe::Mode::Path,
method_name,
IsSuggestion(false),
self_ty,
expr_id,
ProbeScope::TraitsInScope,
)?;
self.lint_fully_qualified_call_from_2018(
span,
method_name,
self_ty,
self_ty_span,
expr_id,
&pick,
);
debug!("resolve_fully_qualified_call: pick={:?}", pick);
{
let mut typeck_results = self.typeck_results.borrow_mut();
let used_trait_imports = Lrc::get_mut(&mut typeck_results.used_trait_imports).unwrap();
for import_id in pick.import_ids {
debug!("resolve_fully_qualified_call: used_trait_import: {:?}", import_id);
used_trait_imports.insert(import_id);
}
}
let def_kind = pick.item.kind.as_def_kind();
debug!(
"resolve_fully_qualified_call: def_kind={:?}, def_id={:?}",
def_kind, pick.item.def_id
);
tcx.check_stability(pick.item.def_id, Some(expr_id), span, Some(method_name.span));
Ok((def_kind, pick.item.def_id))
}
/// Finds item with name `item_name` defined in impl/trait `def_id`
/// and return it, or `None`, if no such item was defined there.
pub fn associated_item(
&self,
def_id: DefId,
item_name: Ident,
ns: Namespace,
) -> Option<ty::AssocItem> {
self.tcx
.associated_items(def_id)
.find_by_name_and_namespace(self.tcx, item_name, ns, def_id)
.copied()
}
}
blackjack_test.go | package blackjack_test
import (
"blackjack"
"bytes"
"math/rand"
"strings"
"testing"
"github.com/google/go-cmp/cmp"
"github.com/google/go-cmp/cmp/cmpopts"
"github.com/mbarley333/cards"
)
// what do we need to start a blackjack game?
// we need a game server layer. NewGame func return *Game
// we need to deal cards from a deck. Deal func
// we need to control the cards dealt in order to test. Pass in stacked deck
// we need a player and a dealer. add player and dealer to Game
// add deck to Game
// deal cards to player and dealer
// dealer logic for hit/stand
// player prompt for hit/stand
func TestMultiPlayers(t *testing.T) {
stack := []cards.Card{
{Rank: cards.Ace, Suit: cards.Club},
{Rank: cards.Eight, Suit: cards.Club},
{Rank: cards.Nine, Suit: cards.Club},
{Rank: cards.Ten, Suit: cards.Spade},
{Rank: cards.Jack, Suit: cards.Club},
{Rank: cards.Ten, Suit: cards.Club},
{Rank: cards.Six, Suit: cards.Club},
{Rank: cards.Seven, Suit: cards.Spade},
{Rank: cards.Seven, Suit: cards.Club},
{Rank: cards.Four, Suit: cards.Club},
{Rank: cards.Three, Suit: cards.Club},
{Rank: cards.King, Suit: cards.Club},
}
deck := cards.Deck{
Cards: stack,
}
output := &bytes.Buffer{}
g, err := blackjack.NewBlackjackGame(
blackjack.WithCustomDeck(deck),
blackjack.WithIncomingDeck(false),
blackjack.WithOutput(output),
)
if err != nil {
t.Fatal(err)
}
g.DeckCount = 1
player := &blackjack.Player{
Name: "Planty",
Action: blackjack.None,
Cash: 99,
Hands: []*blackjack.Hand{
{
Id: 1,
Bet: 1,
Action: blackjack.ActionStand,
},
},
}
g.AddPlayer(player)
player2 := &blackjack.Player{
Name: "Kevin",
Action: blackjack.None,
Cash: 99,
Hands: []*blackjack.Hand{
{
Id: 1,
Bet: 1,
Action: blackjack.ActionStand,
},
},
}
g.AddPlayer(player2)
player3 := &blackjack.Player{
Name: "Donald",
Action: blackjack.None,
Cash: 99,
Hands: []*blackjack.Hand{
{
Id: 1,
Bet: 1,
Action: blackjack.ActionHit,
},
},
}
g.AddPlayer(player3)
g.OpeningDeal()
for _, player := range g.Players {
err := g.PlayHand(player)
if err != nil {
t.Fatal(err)
}
}
g.DealerPlay()
g.Outcome(output)
wantPlayer := blackjack.OutcomeBlackjack
gotPlayer := g.Players[0].Hands[0].Outcome
if wantPlayer != gotPlayer {
t.Fatalf("wanted: %q, got: %q", wantPlayer.String(), gotPlayer.String())
}
wantPlayer2 := blackjack.OutcomeWin
gotPlayer2 := g.Players[1].Hands[0].Outcome
if wantPlayer2 != gotPlayer2 {
t.Fatalf("wanted: %q, got: %q", wantPlayer2.String(), gotPlayer2.String())
}
wantPlayer3 := blackjack.OutcomeBust
gotPlayer3 := g.Players[2].Hands[0].Outcome
if wantPlayer3 != gotPlayer3 {
t.Fatalf("wanted: %q, got: %q", wantPlayer3.String(), gotPlayer3.String())
}
}
func TestRemoveQuitPlayers(t *testing.T) {
t.Parallel()
g := blackjack.Game{
Players: []*blackjack.Player{
{Action: blackjack.ActionStand},
{Action: blackjack.ActionQuit},
{Action: blackjack.ActionQuit},
},
}
g.Players = g.RemoveQuitPlayers()
want := 1
got := len(g.Players)
if want != got {
t.Fatalf("wanted: %d, got: %d", want, got)
}
}
func TestPayout(t *testing.T) {
t.Parallel()
type testCase struct {
bet int
cash int
outcome blackjack.Outcome
handPayout int
}
tcs := []testCase{
{bet: 1, cash: 101, outcome: blackjack.OutcomeWin, handPayout: 1},
{bet: 1, cash: 99, outcome: blackjack.OutcomeLose, handPayout: -1},
{bet: 1, cash: 100, outcome: blackjack.OutcomeTie, handPayout: 0},
{bet: 1, cash: 102, outcome: blackjack.OutcomeBlackjack, handPayout: 2},
}
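	// In every case the final cash equals the starting cash (99) plus the bet
	// plus the payout, e.g. a blackjack on a $1 bet yields 99 + 1 + 2 = 102.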
for _, tc := range tcs {
want := &blackjack.Player{
Cash: tc.cash,
Hands: []*blackjack.Hand{
{
Id: 1,
Outcome: tc.outcome,
Payout: tc.handPayout,
},
},
}
p := &blackjack.Player{
Cash: 99,
Hands: []*blackjack.Hand{
{
Id: 1,
Bet: tc.bet,
Outcome: tc.outcome,
},
},
}
p.Payout()
got := p
if !cmp.Equal(want, got) {
t.Fatal(cmp.Diff(want, got))
}
}
}
func TestPlayerCash(t *testing.T) {
t.Parallel()
output := &bytes.Buffer{}
p := &blackjack.Player{
Name: "Planty",
Cash: 101,
Hands: []*blackjack.Hand{
{
Id: 1,
Outcome: blackjack.OutcomeWin,
Payout: 1,
},
},
}
p.OutcomeReport(output)
want := "Planty won $1. Cash available: $101\n"
got := output.String()
if want != got {
t.Fatalf("wanted: %q, got: %q", want, got)
}
}
func TestPlayerBroke(t *testing.T) {
t.Parallel()
g, err := blackjack.NewBlackjackGame()
if err != nil {
t.Fatal(err)
}
p := &blackjack.Player{
Name: "Planty",
Hands: []*blackjack.Hand{
{
Id: 1,
Outcome: blackjack.OutcomeLose,
Payout: -1,
},
},
Cash: 0,
}
g.AddPlayer(p)
p.Broke()
want := blackjack.ActionQuit
got := p.Action
if want != got {
t.Fatalf("want: %q, got: %q", want.String(), got.String())
}
}
func TestIncomingDeck(t *testing.T) {
t.Parallel()
output := &bytes.Buffer{}
random := rand.New(rand.NewSource(1))
g, err := blackjack.NewBlackjackGame(
blackjack.WithOutput(output),
blackjack.WithDeckCount(3),
blackjack.WithRandom(random),
)
if err != nil {
t.Fatal(err)
}
want := g.Shoe
got := g.IncomingDeck()
if cmp.Equal(want, got, cmpopts.IgnoreUnexported(cards.Deck{})) {
t.Fatal(cmp.Diff(want, got))
}
}
func TestResetFieldsAfterIncomingDeck(t *testing.T) {
t.Parallel()
g := blackjack.Game{
CardsDealt: 55,
CardCounter: blackjack.CardCounter{
Count: 7,
TrueCount: 3.0,
},
}
g.ResetFieldsAfterIncomingDeck()
want := blackjack.CardCounter{
Count: 0,
TrueCount: 0,
}
got := g.CardCounter
if !cmp.Equal(want, got) {
t.Fatal(cmp.Diff(want, got))
}
wantCardsDealt := 0
gotCardsDealt := g.CardsDealt
if wantCardsDealt != gotCardsDealt {
t.Fatalf("want: %d, got: %d", wantCardsDealt, gotCardsDealt)
}
}
func TestScoreDealerHoleCard(t *testing.T) {
type testCase struct {
card cards.Card
score int
description string
}
tcs := []testCase{
{card: cards.Card{Rank: cards.Ace, Suit: cards.Club}, score: 11, description: "Ace"},
{card: cards.Card{Rank: cards.King, Suit: cards.Club}, score: 10, description: "King"},
{card: cards.Card{Rank: cards.Three, Suit: cards.Club}, score: 3, description: "Three"},
}
for _, tc := range tcs {
want := tc.score
got := blackjack.ScoreDealerHoleCard(tc.card)
if want != got {
t.Fatalf("wanted: %d, got: %d", want, got)
}
}
}
func TestDealerAi(t *testing.T) {
g, err := blackjack.NewBlackjackGame()
if err != nil {
t.Fatal(err)
}
type testCase struct {
players []*blackjack.Player
dealerHand []*blackjack.Hand
description string
result bool
}
tcs := []testCase{
{
players: []*blackjack.Player{
{Hands: []*blackjack.Hand{
{
Outcome: blackjack.OutcomeBust,
},
},
},
},
dealerHand: []*blackjack.Hand{
{
Cards: []cards.Card{{Rank: cards.Seven, Suit: cards.Club}, {Rank: cards.Seven, Suit: cards.Club}},
},
},
result: false,
description: "All Players Busted",
},
{
players: []*blackjack.Player{
{Hands: []*blackjack.Hand{
{
Outcome: blackjack.OutcomeBust,
},
},
},
{Hands: []*blackjack.Hand{
{},
},
},
},
dealerHand: []*blackjack.Hand{
{
Cards: []cards.Card{{Rank: cards.Seven, Suit: cards.Club}, {Rank: cards.Seven, Suit: cards.Club}},
},
},
result: true,
description: "All Players Not Busted",
},
{
players: []*blackjack.Player{
{Hands: []*blackjack.Hand{
{
Outcome: blackjack.OutcomeBust,
},
},
},
{Hands: []*blackjack.Hand{
{
Outcome: blackjack.OutcomeBlackjack,
},
},
},
},
dealerHand: []*blackjack.Hand{
{
Cards: []cards.Card{{Rank: cards.Seven, Suit: cards.Club}, {Rank: cards.Seven, Suit: cards.Club}},
},
},
result: false,
description: "All Players Blackjack or Bust",
},
}
for _, tc := range tcs {
g.Players = tc.players
g.Dealer.Hands = tc.dealerHand
want := tc.result
got := g.IsDealerDraw()
if want != got {
t.Fatalf("%s: wanted: %v, got: %v", tc.description, want, got)
}
g.Players = nil
g.Dealer.Hands = nil
}
}
func TestDoubleDown(t *testing.T) {
stack := []cards.Card{
{Rank: cards.Six, Suit: cards.Club},
{Rank: cards.Four, Suit: cards.Club},
{Rank: cards.Four, Suit: cards.Club},
{Rank: cards.Jack, Suit: cards.Club},
{Rank: cards.Ace, Suit: cards.Club},
{Rank: cards.Ten, Suit: cards.Club},
}
deck := cards.Deck{
Cards: stack,
}
output := &bytes.Buffer{}
input := strings.NewReader("d\nq")
g, err := blackjack.NewBlackjackGame(
blackjack.WithCustomDeck(deck),
blackjack.WithIncomingDeck(false),
blackjack.WithOutput(output),
blackjack.WithInput(input),
)
if err != nil {
t.Fatal(err)
}
p := &blackjack.Player{
Name: "planty",
Hands: []*blackjack.Hand{
{
Id: 1,
Bet: 1,
},
},
Cash: 99,
Decide: blackjack.HumanAction,
Bet: blackjack.HumanBet,
Action: blackjack.ActionDoubleDown,
}
g.AddPlayer(p)
g.OpeningDeal()
g.PlayHand(g.Players[0])
g.DealerPlay()
g.Outcome(output)
want := 102
got := g.Players[0].Cash
if want != got {
t.Fatalf("wanted: %d, got: %d", want, got)
}
}
func TestSplit(t *testing.T) {
t.Parallel()
output := &bytes.Buffer{}
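	// Deal a pair of sixes so the first hand is eligible to be split.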
stack := []cards.Card{
{Rank: cards.Six, Suit: cards.Heart},
{Rank: cards.Six, Suit: cards.Club},
{Rank: cards.Nine, Suit: cards.Spade},
{Rank: cards.Four, Suit: cards.Diamond},
}
deck := cards.Deck{
Cards: stack,
}
g, err := blackjack.NewBlackjackGame(
blackjack.WithCustomDeck(deck),
blackjack.WithOutput(output),
blackjack.WithIncomingDeck(false),
)
if err != nil {
t.Fatal(err)
}
p := &blackjack.Player{
Cash: 99,
Hands: []*blackjack.Hand{
{
Id: 1,
Bet: 1,
},
},
}
g.AddPlayer(p)
card := g.Deal(output)
g.Players[0].Hands[p.HandIndex].Cards = append(g.Players[0].Hands[p.HandIndex].Cards, card)
card = g.Deal(output)
g.Players[0].Hands[p.HandIndex].Cards = append(g.Players[0].Hands[p.HandIndex].Cards, card)
card1 := g.Deal(output)
card2 := g.Deal(output)
index := 0
g.Players[0].Split(output, card1, card2, index)
want := &blackjack.Player{
Cash: 98,
Hands: []*blackjack.Hand{
{
Id: 1,
Cards: []cards.Card{
{Rank: cards.Six, Suit: cards.Heart},
{Rank: cards.Nine, Suit: cards.Spade},
},
Bet: 1,
Action: blackjack.None,
},
{
Id: 2,
Cards: []cards.Card{
{Rank: cards.Six, Suit: cards.Club},
{Rank: cards.Four, Suit: cards.Diamond},
},
Bet: 1,
Action: blackjack.None,
},
},
}
got := g.Players[0]
if !cmp.Equal(want, got, cmpopts.IgnoreFields(blackjack.Player{}, "Message")) {
t.Fatal(cmp.Diff(want, got))
}
}
func TestGetHint(t *testing.T) {
t.Parallel()
output := &bytes.Buffer{}
input := strings.NewReader("")
p := &blackjack.Player{
Hands: []*blackjack.Hand{
{
Id: 1,
Cards: []cards.Card{
{Rank: cards.Six, Suit: cards.Heart},
{Rank: cards.Nine, Suit: cards.Spade},
},
Bet: 1,
Action: blackjack.None,
},
},
}
dealerCard := cards.Card{Rank: cards.Ten, Suit: cards.Heart}
index := 0
counter := blackjack.CardCounter{}
stage := blackjack.StageDeciding
action := blackjack.GetHint(output, input, p, dealerCard, index, counter, stage)
want := blackjack.ActionHit.String()
got := action.String()
if want != got {
t.Fatalf("want: %q, got: %q", want, got)
}
}
func TestBustOnDoubleDown(t *testing.T) {
t.Parallel()
output := &bytes.Buffer{}
stack := []cards.Card{
{Rank: cards.Ten, Suit: cards.Heart},
{Rank: cards.Six, Suit: cards.Club},
{Rank: cards.Nine, Suit: cards.Spade},
{Rank: cards.Four, Suit: cards.Diamond},
}
deck := cards.Deck{
Cards: stack,
}
g, err := blackjack.NewBlackjackGame(
blackjack.WithCustomDeck(deck),
blackjack.WithOutput(output),
blackjack.WithIncomingDeck(false),
)
if err != nil {
t.Fatal(err)
}
p := &blackjack.Player{
Cash: 99,
Hands: []*blackjack.Hand{
{
Cards: []cards.Card{
{Rank: cards.Ten, Suit: cards.Heart},
{Rank: cards.Nine, Suit: cards.Spade},
},
Id: 1,
Bet: 1,
},
},
}
g.AddPlayer(p)
card := g.Deal(output)
g.Players[0].Hands[0].DoubleDown(output, card, g.Players[0].Name)
want := blackjack.OutcomeBust.String()
got := g.Players[0].Hands[0].Outcome.String()
if want != got {
t.Fatalf("want: %q, got: %q", want, got)
}
}
| TestResetFieldsAfterIncomingDeck |
app.py | from typing import Callable, List, Dict, Union
import atexit
from collections.abc import Sequence
from copy import deepcopy
import os
from PIL import Image
from fastapi import FastAPI, UploadFile, File, Request
from fastapi.templating import Jinja2Templates
from pydantic import BaseModel
from datetime import datetime
import numpy as np
import torch
import sys
from .logger import Logger
app = FastAPI(
title="torch-deploy",
description="one line deployment for pytorch models"
)
config = None
inference_fn = None
pre = []
post = []
logger = None
templates = Jinja2Templates(directory=os.path.join(os.path.dirname(__file__), "templates"))
@atexit.register
def | ():
if logger is not None:
logger.close()
class ModelInput(BaseModel):
'''Pydantic Model to receive parameters for the /predict endpoint'''
inputs: Union[List, Dict]
def setup(my_config):
'''Initialize the global variables'''
global inference_fn, pre, post, logger, config
config = deepcopy(my_config)
# Make log directory if it doesn't exist
my_logdir = config["logdir"]
if not os.path.isdir(my_logdir):
os.mkdir(my_logdir)
# Init logger
logger = Logger(os.path.join(my_logdir, "logfile"))
# Init inference_fn
model = config["model"]
if config["inference_fn"] is not None:
inference_fn = getattr(model, config["inference_fn"])
else:
inference_fn = model
# Init preprocessing and postprocessing functions
my_pre = config["pre"]
my_post = config["post"]
if my_pre:
if isinstance(my_pre, Sequence):
pre = list(my_pre)
else:
pre = [my_pre]
if my_post:
if isinstance(my_post, Sequence):
post = list(my_post)
else:
post = [my_post]
def run_model(inp):
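    '''Apply the preprocessing chain, run the model, then apply postprocessing.'''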
# Apply all preprocessing functions
for f in pre:
inp = f(inp)
# Pass input through model
output = inference_fn(inp)
# Apply all postprocessing functions
for f in post:
output = f(output)
# If torch tensor or numpy array, transform to list so we can pass it back
if isinstance(output, (np.ndarray, torch.Tensor)):
output = output.tolist()
return output
@app.get("/")
def root():
# For testing/debugging
return {"text": "Hello World!"}
@app.post("/predict")
def predict(model_input: ModelInput, request: Request):
'''
View function handling the main /predict endpoint
Input: Expect to receive an application/json body. The value of the "inputs" field
will be used as the input that will be passed to the model
and should be a list or a dict.
Output: The output of the model after being run through the postprocessing
functions.
'''
inp = model_input.inputs
# Logging
client_host = request.client.host
logger.log(f'[{datetime.now()}] Received input of size {sys.getsizeof(inp)} from {client_host}')
output = run_model(inp)
return {"output": output}
@app.get("/predict_image")
def upload_image(request: Request):
return templates.TemplateResponse("upload.html", {"request": request})
@app.post("/predict_image")
def predict_image(request: Request, file: UploadFile = File(...)):
    '''
    View function handling the /predict_image endpoint
    Input: Expect to receive a multipart/form-data body with an uploaded
    image file. The image is opened with PIL and passed to the model.
    Output: The output of the model after being run through the postprocessing
    functions.
    '''
inp = Image.open(file.file)
# Logging
client_host = request.client.host
logger.log(f'[{datetime.now()}] Received input of size {sys.getsizeof(inp)} from {client_host}')
output = run_model(inp)
return {"output": output}
| cleanup |
test_general_SysmetricLogger.py | #!/usr/bin/env python
####################
# Required Modules #
####################
# Generic/Built-in
import os
import logging
import time
from typing import Callable
# Libs
import pytest
import structlog
# Custom
from conftest import (
SYSMETRIC_SUPPORTED_METADATA,
SYSMETRIC_TRACKERS,
DURATION,
POLL_INTERVAL,
extract_name,
reconfigure_global_structlog_params
)
from synlogger.config import SYSMETRICS_PREFIX, SYSMETRICS_PORT
from synlogger.utils import StructlogUtils
##################
# Configurations #
##################
file_path = os.path.abspath(__file__)
class_name = "SysmetricLoggerTest"
function_name = "test_SysmetricLogger_initialise"
###########################
# Tests - SysmetricLogger #
###########################
def test_SysmetricLogger_default_attributes(sysmetric_logger_default_params):
    """
    Tests for the correct initialisation defaults for the SysmetricLogger class
    # C1: logger_name defaults to a name carrying the SYSMETRICS_PREFIX
    # C2: logging_variant defaults to "basic"
    # C3: server does not need to be specified by default
    # C4: port is assigned SYSMETRICS_PORT by default
    # C5: logging_level defaults to logging.INFO
    # C6: debugging_fields defaults to False
    # C7: filter_functions defaults to an empty list
    # C8: censor_keys defaults to an empty list (i.e. no information censored)
    """
# C1
assert SYSMETRICS_PREFIX in sysmetric_logger_default_params.logger_name
# C2
assert sysmetric_logger_default_params.logging_variant == "basic"
# C3
assert sysmetric_logger_default_params.server is None
# C4
assert sysmetric_logger_default_params.port == SYSMETRICS_PORT
# C5
assert sysmetric_logger_default_params.logging_level == logging.INFO
# C6
assert sysmetric_logger_default_params.debugging_fields == False
# C7
assert len(sysmetric_logger_default_params.filter_functions) == 0
# C8
assert len(sysmetric_logger_default_params.censor_keys) == 0
def | (sysmetric_logger_default_params):
"""
Tests if sysmetric processors loaded are valid
# C1: All processors returned are functions
# C2: logging_renderer must be the last processor of the list
# C3: Sysmetric processors must be included alongside default processors
"""
# C1
processors = sysmetric_logger_default_params._configure_processors()
assert all(isinstance(_funct, Callable) for _funct in processors)
# C2
last_processor = processors[-1]
assert any(
extract_name(last_processor) == extract_name(_funct)
for _funct in [
StructlogUtils.graypy_structlog_processor,
structlog.processors.JSONRenderer(indent=1)
]
)
# C3
processors_names = [extract_name(processor) for processor in processors]
assert all(
extract_name(sys_tracker) in processors_names
for sys_tracker in SYSMETRIC_TRACKERS
)
def test_SysmetricLogger_is_tracking(sysmetric_logger):
"""
Tests if tracking state is toggling correctly. Note that while the state
tested here is dependent on .track() & .terminate(), we are only testing
for the change of state. The correctness of .track() & .terminate() is not
enforced and is assumed to work here.
# C1: is_tracking returns False before tracking is started
# C2: is_tracking returns True after tracking is started
# C3: is_tracking returns False after tracking has been terminated
"""
# C1
assert sysmetric_logger.is_tracking() == False
# C2
sysmetric_logger.track(
file_path=file_path,
class_name=class_name,
function_name=function_name,
resolution=POLL_INTERVAL
)
assert sysmetric_logger.is_tracking() == True
# C3
sysmetric_logger.terminate()
assert sysmetric_logger.is_tracking() == False
def test_SysmetricLogger_track(sysmetric_logger):
"""
Tests if sysmetric process tracking starts & polls correctly
# C1: Before tracking is initialised, sysmetric_logger.tracker is None
# C2: After tracking is initialised, sysmetric_logger.tracker is not None
# C3: After tracking is initialised, tracking process is actively running
# C4: No. of trials recorded tallies with expected no. of records given
a predetermined polling interval over a specified duration
# C5: Each record detected has the appropriate metadata logged
# C6: Each sysmetric metadata logged has valid values
"""
# C1
assert sysmetric_logger.tracker is None
# Start tracking process to check for state changes
sysmetric_logger.track(
file_path=file_path,
class_name=class_name,
function_name=function_name,
resolution=POLL_INTERVAL
)
# C2
assert sysmetric_logger.tracker is not None
# C3
assert sysmetric_logger.tracker.is_alive()
with reconfigure_global_structlog_params(sysmetric_logger) as cap_logs:
sysmetric_logger.synlog.setLevel(logging.INFO) # V.IMPT!!!
###########################
# Implementation Footnote #
###########################
# [Cause]
# Structlog's log capture mechanism does not allow for log capturing
# from multiprocessed loggers with custom processors (i.e. non-global).
# [Problems]
# Sysmetric tracking is performed by running a backgrounded process
        # polling for logs once every specified interval. Being a structlog
# logger, it suffers from the aforementioned limitations DURING TESTING.
# This results in the failure to capture logs for analysis/testing.
# [Solution]
# Manually simulate probing behaviour in the global context, using
# custom processors that are same as the ones running in the
# backgrounded logger.
trial_count = int(DURATION/POLL_INTERVAL)
for _ in range(trial_count):
sysmetric_logger._probe(
resolution=POLL_INTERVAL,
descriptors={
"ID_path": file_path,
"ID_class": class_name,
"ID_function": function_name
}
)
# C4
assert len(cap_logs) == trial_count
# C5
assert all(
set(SYSMETRIC_SUPPORTED_METADATA).issubset(list(record.keys()))
for record in cap_logs
)
for record in cap_logs:
# C6
assert record.get('logger') == sysmetric_logger.logger_name
assert record.get('file_path') == sysmetric_logger.file_path
level_name = logging.getLevelName(sysmetric_logger.logging_level)
assert record.get('level') == level_name.lower()
assert record.get('log_level') == level_name.lower()
assert record.get('level_number') == sysmetric_logger.logging_level
assert isinstance(record.get('timestamp'), str)
assert isinstance(record.get('ID_path'), str)
assert isinstance(record.get('ID_class'), str)
assert isinstance(record.get('ID_function'), str)
assert isinstance(record.get('cpu_percent'), float)
assert isinstance(record.get('memory_total'), int)
assert isinstance(record.get('memory_available'), int)
assert isinstance(record.get('memory_used'), int)
assert isinstance(record.get('memory_free'), int)
assert isinstance(record.get('disk_read_counter'), int)
assert isinstance(record.get('disk_write_counter'), int)
assert isinstance(record.get('disk_read_bytes'), int)
assert isinstance(record.get('disk_write_bytes'), int)
assert isinstance(record.get('net_bytes_sent'), int)
assert isinstance(record.get('net_bytes_recv'), int)
assert isinstance(record.get('net_packets_sent'), int)
assert isinstance(record.get('net_packets_recv'), int)
# Manually clean up process (no dependency on .terminate())
sysmetric_logger.tracker.terminate() # send SIGTERM signal to the child
sysmetric_logger.tracker.join()
exit_code = sysmetric_logger.tracker.exitcode
sysmetric_logger.tracker.close()
sysmetric_logger.tracker = None # Reset state of tracker
# assert exit_code == 0 # successful termination
def test_SysmetricLogger_terminate(sysmetric_logger):
"""
Tests if sysmetric process tracking terminates correctly
# C1: Before tracking is terminated, sysmetric_logger.tracker is not None
# C2: Before tracking is terminated, tracking process is actively running
# C3: After tracking is terminated, sysmetric_logger.tracker is None
# C4: After tracking is terminated, saved tracker is no longer running
# C5: Tracking was terminated gracefully
"""
sysmetric_logger.track(
file_path=file_path,
class_name=class_name,
function_name=function_name,
resolution=POLL_INTERVAL
)
time.sleep(DURATION)
# C1
assert sysmetric_logger.tracker is not None
# C2
assert sysmetric_logger.tracker.is_alive()
saved_tracker = sysmetric_logger.tracker
exit_code = sysmetric_logger.terminate()
# C3
assert sysmetric_logger.tracker is None
# C4
assert saved_tracker._closed
# C5
assert exit_code == 0
@pytest.mark.xfail(raises=RuntimeError)
def test_SysmetricLogger_premature_termination(sysmetric_logger):
"""
Tests if premature termination condition was caught and handled
# C1: Check that 'RuntimeError(f"Attempted to terminate logger XXX before
initialisation!")' is caught, due to Exception being raised when
checking for initialisation state in sysmetric_logger
"""
sysmetric_logger.terminate()
| test_SysmetricLogger_configure_processors |
q44.py | """
q44.py
~~~~~~
Check Balanced: Implement a function to check if a binary tree is balanced. For
the purposes of this question, a balanced tree is defined to be a tree such that
the heights of the two subtrees of any node never differ by more than one.
Hints: #21, #33, #49, #105, #124
"""
import unittest
class Node:
def __init__(self, value):
self.left = None
self.right = None
self.value = value
def __repr__(self):
return f'Node ({self.value})'
def array_to_binary(array, start=None, end=None):
"""Create binary search tree from `array` values via recursion."""
start = 0 if start is None else start
end = len(array) - 1 if end is None else end
if start > end:
return None
mid = (start + end) // 2
node = Node(array[mid])
node.left = array_to_binary(array, start, mid - 1)
node.right = array_to_binary(array, mid + 1, end)
return node
def balanced(node):
    """Return True if the binary tree rooted at `node` is balanced, False if not."""
    def height(node):
        # Return the subtree height, or -1 as soon as an imbalance is found.
        if node is None:
            return 0
        left = height(node.left)
        right = height(node.right)
        if left == -1 or right == -1 or abs(left - right) > 1:
            return -1
        return max(left, right) + 1
    return height(node) != -1
class TestBalanced(unittest.TestCase):
def setUp(self):
balanced_tree = array_to_binary([3, 5, 6, 9, 17, 23])
unbalanced_tree = array_to_binary([0, 2, 5, 6, 9, 11, 45, 101])
        node = unbalanced_tree.right
        # Walk to the deepest right-most node and hang two extra nodes off it
        # so the tree becomes unbalanced.
        while node.left or node.right:
            if node.right:
                node = node.right
            elif node.left:
                node = node.left
        node.right = Node(6)
        node.right.right = Node(6)
        self.data = {
            balanced_tree: True,
            unbalanced_tree: False,
        }
def | (self):
for tree, expected in self.data.items():
self.assertEqual(balanced(tree), expected)
if __name__ == "__main__":
unittest.main() | test_balanced |
cartesian.rs | #![deny(warnings)]
#![allow(clippy::cognitive_complexity)]
extern crate mpi;
use mpi::traits::*;
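// Exercises a 2x2 Cartesian communicator (periodic only in the second
// dimension): subgroups, coordinate lookups, and neighbour shifts.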
fn | () {
let universe = mpi::initialize().unwrap();
let comm = universe.world();
if comm.size() < 4 {
return;
}
let cart_comm = {
let dims = [2, 2];
let periodic = [false, true];
let reorder = true;
if let Some(cart_comm) = comm.create_cartesian_communicator(&dims, &periodic, reorder) {
cart_comm
} else {
assert!(comm.rank() >= 4);
return;
}
};
assert_eq!(2, cart_comm.num_dimensions());
let mpi::topology::CartesianLayout {
dims,
periods,
coords,
} = cart_comm.get_layout();
assert_eq!([2 as mpi::Count, 2], &dims[..]);
assert_eq!([false, true], &periods[..]);
let xrank = coords[0];
let yrank = coords[1];
assert!(0 <= xrank && xrank < 2);
assert!(0 <= yrank && yrank < 2);
let xcomm = cart_comm.subgroup(&[true, false]);
let ycomm = cart_comm.subgroup(&[false, true]);
assert_eq!(2, xcomm.size());
assert_eq!(xrank, xcomm.rank());
assert_eq!(2, ycomm.size());
assert_eq!(yrank, ycomm.rank());
// the first dimension is non-periodic
let (x_src, x_dest) = cart_comm.shift(0, 1);
if xrank == 0 {
assert!(x_src.is_none());
assert!(x_dest.is_some());
let coords = cart_comm.rank_to_coordinates(x_dest.unwrap());
assert_eq!(1, coords[0]);
} else {
assert_eq!(1, xrank);
assert!(x_src.is_some());
assert!(x_dest.is_none());
let coords = cart_comm.rank_to_coordinates(x_src.unwrap());
assert_eq!(0, coords[0]);
}
// the second dimension is periodic
{
let (y_src, y_dest) = cart_comm.shift(1, 1);
assert!(y_src.is_some());
assert!(y_dest.is_some());
let y_src_coords = cart_comm.rank_to_coordinates(y_src.unwrap());
assert_eq!((yrank - 1) & 0b1, y_src_coords[1]);
let y_dest_coords = cart_comm.rank_to_coordinates(y_dest.unwrap());
assert_eq!((yrank + 1) & 0b1, y_dest_coords[1]);
}
// second dimension shift by 2 should be identity
{
let (y_src, y_dest) = cart_comm.shift(1, 2);
assert_eq!(comm.rank(), y_src.unwrap());
assert_eq!(comm.rank(), y_dest.unwrap());
}
}
| main |
task.module.ts | import {NgModule} from '@angular/core';
import {PureTaskListComponent} from './pure-task-list.component';
import {TaskListComponent} from './task-list.component';
import {TaskComponent} from './task.component';
import {CommonModule} from '@angular/common';
import {NgxsModule} from '@ngxs/store';
import {TasksState} from '../state/task.state';
@NgModule({
declarations: [
PureTaskListComponent,
TaskListComponent,
TaskComponent
],
imports: [
CommonModule,
NgxsModule.forFeature([TasksState])
],
exports: [
TaskComponent,
TaskListComponent
]
})
export class | {
}
| TaskModule |
mysql.test.ts | import * as mm from 'mingru-models';
import { itThrows } from 'it-throws';
import user from '../models/user.js';
import * as mr from '../../dist/main.js';
import { eq } from '../assert-aliases.js';
const TimePkg = 'time';
const dialect = new mr.MySQL();
function sqlEq(sql: mm.SQL, value: string) {
eq(mr.sqlIO(sql, null, 'sqlEq').getCodeString(), `"${value}"`);
}
function testDTToGoType(col: mm.Column, type: string, pkg?: string) {
const typeInfo = dialect.colTypeToGoType(col.__type());
eq(typeInfo.fullTypeName, type);
const atomicInfo = mr.getAtomicTypeInfo(typeInfo);
eq(atomicInfo.moduleName, pkg ?? '');
}
it('encodeName', () => {
eq(dialect.encodeName('abc'), '`abc`');
});
it('encodeColumnName', () => {
eq(dialect.encodeColumnName(user.age), '`age`');
eq(dialect.encodeColumnName(user.follower_count), '`follower_c`');
});
it('encodeTableName', () => {
class | extends mm.Table {}
const t = mm.table(Table, { dbName: 'haha' });
eq(dialect.encodeTableName(user), '`user`');
eq(dialect.encodeTableName(t), '`haha`');
});
it('DT to go type', () => {
const tests: Array<[mm.Column, string, unknown]> = [
// PK
[mm.pk(), 'uint64', null],
// Integer
[mm.int(), 'int', null],
[mm.uInt(), 'uint', null],
[mm.bigInt(), 'int64', null],
[mm.uBigInt(), 'uint64', null],
[mm.smallInt(), 'int16', null],
[mm.uSmallInt(), 'uint16', null],
[mm.tinyInt(), 'int8', null],
[mm.uTinyInt(), 'uint8', null],
[mm.bool(), 'bool', null],
// String
[mm.varChar(10), 'string', null],
[mm.char(10), 'string', null],
// Time
[mm.datetime(), 'time.Time', TimePkg],
[mm.date(), 'time.Time', TimePkg],
[mm.timestamp(), 'time.Time', TimePkg],
];
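  // Each non-PK type is re-tested as nullable below, which should map to a Go pointer type.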
for (const t of tests) {
const column = t[0];
testDTToGoType(column, t[1], t[2] as string);
if (!column.__type().pk) {
column.__type().nullable = true;
testDTToGoType(column, `*${t[1]}`, t[2] as string);
}
}
});
it('DT (not supported)', () => {
const props = new mm.ColumnType(['type1', 'type2']);
itThrows(() => dialect.colTypeToGoType(props), 'Type not supported: [type1,type2]');
});
it('as', () => {
sqlEq(dialect.as(mm.sql`abc`, 'def'), 'abc AS `def`');
});
it('SQL calls', () => {
// eslint-disable-next-line @typescript-eslint/unbound-method
const t = dialect.sqlCall;
eq(t(mm.SQLCallType.dateNow), 'CURDATE');
eq(t(mm.SQLCallType.timeNow), 'CURTIME');
eq(t(mm.SQLCallType.datetimeNow), 'NOW');
eq(t(mm.SQLCallType.utcDateNow), 'UTC_DATE');
eq(t(mm.SQLCallType.utcTimeNow), 'UTC_TIME');
eq(t(mm.SQLCallType.utcDatetimeNow), 'UTC_TIMESTAMP');
eq(t(mm.SQLCallType.count), 'COUNT');
eq(t(mm.SQLCallType.coalesce), 'COALESCE');
eq(t(mm.SQLCallType.avg), 'AVG');
eq(t(mm.SQLCallType.sum), 'SUM');
eq(t(mm.SQLCallType.min), 'MIN');
eq(t(mm.SQLCallType.max), 'MAX');
eq(t(mm.SQLCallType.year), 'YEAR');
eq(t(mm.SQLCallType.month), 'MONTH');
eq(t(mm.SQLCallType.day), 'DAY');
eq(t(mm.SQLCallType.week), 'WEEK');
eq(t(mm.SQLCallType.hour), 'HOUR');
eq(t(mm.SQLCallType.minute), 'MINUTE');
eq(t(mm.SQLCallType.second), 'SECOND');
eq(t(mm.SQLCallType.timestampNow), 'NOW');
});
it('objToSQL', () => {
// null
sqlEq(dialect.objToSQL(null, user), 'NULL');
// number
sqlEq(dialect.objToSQL(-32, user), '-32');
// boolean
sqlEq(dialect.objToSQL(true, user), '1');
sqlEq(dialect.objToSQL(false, user), '0');
// string
sqlEq(dialect.objToSQL('a 123 🛋', user), "'a 123 🛋'");
sqlEq(dialect.objToSQL('', user), "''");
sqlEq(dialect.objToSQL('\'"\\', user), "'''\\\"\\\\'");
// undefined
itThrows(() => dialect.objToSQL(undefined, user), 'Value is undefined');
// Others
itThrows(() => dialect.objToSQL([], user), 'Unsupported type of object "Array"');
});
it('colToSQLType', () => {
// Integers
sqlEq(dialect.colToSQLType(mm.int()), 'INT NOT NULL');
sqlEq(dialect.colToSQLType(mm.int(3)), 'INT(3) NOT NULL');
sqlEq(dialect.colToSQLType(mm.bigInt()), 'BIGINT NOT NULL');
sqlEq(dialect.colToSQLType(mm.bigInt(3)), 'BIGINT(3) NOT NULL');
sqlEq(dialect.colToSQLType(mm.tinyInt()), 'TINYINT NOT NULL');
sqlEq(dialect.colToSQLType(mm.tinyInt(1)), 'TINYINT(1) NOT NULL');
sqlEq(dialect.colToSQLType(mm.smallInt()), 'SMALLINT NOT NULL');
sqlEq(dialect.colToSQLType(mm.smallInt(3)), 'SMALLINT(3) NOT NULL');
sqlEq(dialect.colToSQLType(mm.uInt()), 'INT UNSIGNED NOT NULL');
sqlEq(dialect.colToSQLType(mm.uInt(3)), 'INT(3) UNSIGNED NOT NULL');
sqlEq(dialect.colToSQLType(mm.uBigInt()), 'BIGINT UNSIGNED NOT NULL');
sqlEq(dialect.colToSQLType(mm.uBigInt(3)), 'BIGINT(3) UNSIGNED NOT NULL');
sqlEq(dialect.colToSQLType(mm.uTinyInt()), 'TINYINT UNSIGNED NOT NULL');
sqlEq(dialect.colToSQLType(mm.uTinyInt(1)), 'TINYINT(1) UNSIGNED NOT NULL');
sqlEq(dialect.colToSQLType(mm.uSmallInt()), 'SMALLINT UNSIGNED NOT NULL');
sqlEq(dialect.colToSQLType(mm.uSmallInt(3)), 'SMALLINT(3) UNSIGNED NOT NULL');
sqlEq(dialect.colToSQLType(mm.bool()), 'TINYINT NOT NULL');
// Chars
sqlEq(dialect.colToSQLType(mm.varChar(3)), 'VARCHAR(3) NOT NULL');
sqlEq(dialect.colToSQLType(mm.char(3)), 'CHAR(3) NOT NULL');
sqlEq(dialect.colToSQLType(mm.text()), 'TEXT NOT NULL');
// DateTime
sqlEq(dialect.colToSQLType(mm.date()), 'DATE NOT NULL');
sqlEq(dialect.colToSQLType(mm.date({ defaultToNow: 'utc' })), 'DATE NOT NULL');
sqlEq(dialect.colToSQLType(mm.date({ fsp: 3 })), 'DATE(3) NOT NULL');
sqlEq(dialect.colToSQLType(mm.datetime()), 'DATETIME NOT NULL');
sqlEq(dialect.colToSQLType(mm.datetime({ defaultToNow: 'utc' })), 'DATETIME NOT NULL');
sqlEq(dialect.colToSQLType(mm.datetime({ fsp: 3 })), 'DATETIME(3) NOT NULL');
sqlEq(dialect.colToSQLType(mm.time()), 'TIME NOT NULL');
sqlEq(dialect.colToSQLType(mm.time({ defaultToNow: 'utc' })), 'TIME NOT NULL');
sqlEq(dialect.colToSQLType(mm.time({ fsp: 3 })), 'TIME(3) NOT NULL');
sqlEq(
dialect.colToSQLType(mm.datetime({ defaultToNow: 'server', fsp: 3 })),
'DATETIME(3) NOT NULL',
);
sqlEq(dialect.colToSQLType(mm.time({ defaultToNow: 'server', fsp: 3 })), 'TIME(3) NOT NULL');
// NULL
sqlEq(dialect.colToSQLType(mm.int().nullable), 'INT NULL DEFAULT NULL');
// Default value
sqlEq(dialect.colToSQLType(mm.int().default(43).nullable), 'INT NULL DEFAULT 43');
sqlEq(
dialect.colToSQLType(mm.varChar(23).default('oo').nullable),
"VARCHAR(23) NULL DEFAULT 'oo'",
);
});
| Table |
update.service.ts | import { Injectable } from '@angular/core';
import { Http, RequestOptions, Headers } from '@angular/http';
import { Config } from 'app/config';
import * as semver from 'semver';
import { Observable } from 'rxjs/Observable';
import 'rxjs/add/operator/map';
import 'rxjs/add/operator/do';
import { Subject } from 'rxjs/Subject';
import { AnalyticsService } from 'app/services/analytics.service';
import { shell } from 'electron';
const https = require('https');
const fs = require('fs');
const app = require('electron').remote.app;
const spawn = require('child_process').spawn;
const platform = require('os').platform();
/**
 * Auto-update for Windows (with temp file and old update file deletion).
 * Download trigger for macOS and Linux.
*/
@Injectable()
export class | {
private tempUpdateFileName = 'update.download';
private updateFileName = {
win32: 'mockoon.setup.%v%.exe',
darwin: 'mockoon.setup.%v%.dmg',
linux: 'mockoon-%v%-x86_64.AppImage',
};
public updateAvailable: Subject<any> = new Subject();
private nextVersion: string;
private updateFilePath = app.getPath('userData') + '/';
constructor(private http: Http, private analyticsService: AnalyticsService) {
// always remove temp file
this.removeTempFile();
// avoid cache from AWS (cloudflare has a page rule for this)
const headers = new Headers();
headers.append('Cache-Control', 'no-cache');
headers.append('Pragma', 'no-cache');
const options = new RequestOptions({ headers });
if (platform === 'darwin' || platform === 'linux' || platform === 'win32') {
// request the updates.json file
this.http.get(Config.updatesUrl, options)
.map(response => response.json())
.subscribe((updates) => {
// check if version is ahead and trigger something depending on platform
if (semver.gt(updates[platform].version, Config.version)) {
this.nextVersion = updates[platform].version;
// only trigger download for windows, for other just inform
if (platform === 'win32') {
const updateFileName = this.updateFileName[platform].replace('%v%', this.nextVersion);
            // if we already have an update file
if (fs.existsSync(this.updateFilePath + updateFileName)) {
this.updateAvailable.next();
} else {
this.fileDownload(updates[platform].file, this.updateFilePath, updateFileName, () => {
this.updateAvailable.next();
});
}
} else {
this.updateAvailable.next();
}
} else {
this.removeOldUpdate();
}
});
}
}
/**
* Launch setup file and close the application
*/
public applyUpdate() {
      // nextVersion is only set once a newer release has been found
      if (this.nextVersion) {
// launch exe detached and close app
if (platform === 'win32') {
spawn(this.updateFilePath + this.updateFileName[platform].replace('%v%', this.nextVersion), ['--updated'], { detached: true, stdio: 'ignore' }).unref();
app.quit();
} else if (platform === 'darwin' || platform === 'linux') {
shell.openExternal(Config.releasesUrl + this.updateFileName[platform].replace('%v%', this.nextVersion));
}
}
}
/**
* Generic file downloader
*/
private fileDownload(url: string, destination: string, filename: string, callback: Function) {
const file = fs.createWriteStream(destination + this.tempUpdateFileName);
const request = https.get(url, (response) => {
response.pipe(file);
file.on('finish', () => {
file.close(() => {
// rename when successful
fs.rename(destination + this.tempUpdateFileName, destination + filename, callback);
});
});
}).on('error', (error) => {
      // Remove the partial download; ignore any unlink error
      fs.unlink(destination + this.tempUpdateFileName, () => { });
});
}
/**
* Remove update file corresponding to current version (for win only)
*/
private removeOldUpdate() {
if (platform === 'win32') {
fs.unlink(this.updateFilePath + this.updateFileName[platform].replace('%v%', Config.version), () => { });
}
}
/**
* Remove the temporary update.download file (for win only)
*/
private removeTempFile() {
if (platform === 'win32') {
fs.unlink(this.updateFilePath + this.tempUpdateFileName, () => { });
}
}
}
| UpdateService |
testing_slot_clock.rs | use super::SlotClock;
use std::sync::RwLock;
use std::time::Duration;
use types::Slot;
#[derive(Debug, PartialEq)]
pub enum Error {}
/// Determines the present slot based upon the present system time.
pub struct TestingSlotClock {
slot: RwLock<Slot>,
}
impl TestingSlotClock {
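    /// Set the clock to an arbitrary slot (testing helper).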
pub fn set_slot(&self, slot: u64) {
*self.slot.write().expect("TestingSlotClock poisoned.") = Slot::from(slot);
}
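    /// Advance the clock by a single slot.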
pub fn advance_slot(&self) {
self.set_slot(self.present_slot().unwrap().unwrap().as_u64() + 1) | }
}
impl SlotClock for TestingSlotClock {
type Error = Error;
/// Create a new `TestingSlotClock` at `genesis_slot`.
fn new(genesis_slot: Slot, _genesis_seconds: u64, _slot_duration_seconds: u64) -> Self {
TestingSlotClock {
slot: RwLock::new(genesis_slot),
}
}
fn present_slot(&self) -> Result<Option<Slot>, Error> {
let slot = *self.slot.read().expect("TestingSlotClock poisoned.");
Ok(Some(slot))
}
/// Always returns a duration of 1 second.
fn duration_to_next_slot(&self) -> Result<Option<Duration>, Error> {
Ok(Some(Duration::from_secs(1)))
}
fn slot_duration_millis(&self) -> u64 {
0
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_slot_now() {
let null = 0;
let clock = TestingSlotClock::new(Slot::new(10), null, null);
assert_eq!(clock.present_slot(), Ok(Some(Slot::new(10))));
clock.set_slot(123);
assert_eq!(clock.present_slot(), Ok(Some(Slot::new(123))));
}
} | |
633d1608.f62874c0.js | (window.webpackJsonp=window.webpackJsonp||[]).push([[100],{157:function(e,t,r){"use strict";r.r(t),r.d(t,"frontMatter",(function(){return s})),r.d(t,"metadata",(function(){return a})),r.d(t,"rightToc",(function(){return l})),r.d(t,"default",(function(){return p}));var n=r(2),o=r(6),i=(r(0),r(317)),s={id:"browser-redirect-flow-completion",title:"HTTP Redirection Configuration"},a={unversionedId:"concepts/browser-redirect-flow-completion",id:"version-v0.4/concepts/browser-redirect-flow-completion",isDocsHomePage:!1,title:"HTTP Redirection Configuration",description:"Self-Service flows such as Login, Registration, Updating Settings support two",source:"@site/versioned_docs/version-v0.4/concepts/browser-redirect-flow-completion.mdx",slug:"/concepts/browser-redirect-flow-completion",permalink:"/kratos/docs/concepts/browser-redirect-flow-completion",editUrl:"https://github.com/ory/kratos/edit/master/docs/versioned_docs/version-v0.4/concepts/browser-redirect-flow-completion.mdx",version:"v0.4",lastUpdatedBy:"aeneasr",lastUpdatedAt:1594198226,sidebar:"version-v0.4/docs",previous:{title:"Social Sign In, OpenID Connect, and OAuth2",permalink:"/kratos/docs/concepts/credentials/openid-connect-oidc-oauth2"},next:{title:"Out-of-band communication via E-Mail and SMS",permalink:"/kratos/docs/concepts/email-sms"}},l=[{value:"Redirection",id:"redirection",children:[{value:"Post-Login Redirection",id:"post-login-redirection",children:[]},{value:"Post-Registration Redirection",id:"post-registration-redirection",children:[]},{value:"Post-Settings Redirection",id:"post-settings-redirection",children:[]}]},{value:"JSON",id:"json",children:[]}],c={rightToc:l};function | (e){var t=e.components,r=Object(o.a)(e,["components"]);return Object(i.b)("wrapper",Object(n.a)({},c,r,{components:t,mdxType:"MDXLayout"}),Object(i.b)("p",null,"Self-Service flows such as Login, Registration, Updating Settings support two\nsuccessful response modes:"),Object(i.b)("ul",null,Object(i.b)("li",{parentName:"ul"},"For browsers, the response will be a ",Object(i.b)("a",Object(n.a)({parentName:"li"},{href:"#redirection"}),"redirection"),"."),Object(i.b)("li",{parentName:"ul"},"For API clients (this includes AJAX) the response will be in ",Object(i.b)("a",Object(n.a)({parentName:"li"},{href:"#json"}),"JSON"),".")),Object(i.b)("h2",{id:"redirection"},"Redirection"),Object(i.b)("p",null,"Browser requests, identified by the ",Object(i.b)("inlineCode",{parentName:"p"},"Accept: text/html")," header, complete with a\nredirection flow. If no redirection URL is set for the flow, the Default\nRedirect URL will be used for most flows (e.g. 
login, registration):"),Object(i.b)("pre",null,Object(i.b)("code",Object(n.a)({parentName:"pre"},{className:"language-yaml",metastring:'file="path/to/my/kratos.config.yml"',file:'"path/to/my/kratos.config.yml"'}),"selfservice:\n default_browser_return_url: https://always-end-up-here-per-default/\n")),Object(i.b)("p",null,"It is possible to specify a redirect URL per Self-Service Flow:"),Object(i.b)("pre",null,Object(i.b)("code",Object(n.a)({parentName:"pre"},{className:"language-yaml",metastring:'file="path/to/my/kratos.config.yml"',file:'"path/to/my/kratos.config.yml"'}),"selfservice:\n flows:\n login:\n after:\n default_browser_return_url: https://end-up-here-after-login/\n registration:\n after:\n default_browser_return_url: https://end-up-here-after-registration/\n # ...\n")),Object(i.b)("p",null,"You may also set redirect URLs per strategy (overrides\n",Object(i.b)("inlineCode",{parentName:"p"},"selfservice.<login|registration|...>.default_return_to"),"):"),Object(i.b)("pre",null,Object(i.b)("code",Object(n.a)({parentName:"pre"},{className:"language-yaml",metastring:'file="path/to/my/kratos.config.yml"',file:'"path/to/my/kratos.config.yml"'}),"selfservice:\n flows:\n login:\n after:\n default_browser_return_url: https://this-is-overridden-by-password/\n password:\n default_browser_return_url: https://end-up-here-after-login-with-password/\n # ...\n")),Object(i.b)("p",null,"It is also possible to redirect someone back to the original URL. For example,\nif a user requests ",Object(i.b)("inlineCode",{parentName:"p"},"https://www.myapp.com/blog/write")," but is not logged in, we\nwant the user to end up at that page after login. To achieve that, you append\n",Object(i.b)("inlineCode",{parentName:"p"},"?return_to=https://www.myapp.com/blog/write")," when initializing the Login /\nRegistration /Settings flow."),Object(i.b)("p",null,"Because ORY Kratos prevents Open Redirect Attacks, you need to whitelist the\ndomain in your ORY Kratos config:"),Object(i.b)("pre",null,Object(i.b)("code",Object(n.a)({parentName:"pre"},{className:"language-yaml",metastring:'file="path/to/my/kratos.config.yml"',file:'"path/to/my/kratos.config.yml"'}),"selfservice:\n whitelisted_return_urls:\n - https://www.myapp.com/\n")),Object(i.b)("h3",{id:"post-login-redirection"},"Post-Login Redirection"),Object(i.b)("p",null,"Post-login redirection considers the following configuration keys:"),Object(i.b)("pre",null,Object(i.b)("code",Object(n.a)({parentName:"pre"},{className:"language-yaml",metastring:'file="path/to/my/kratos.config.yml"',file:'"path/to/my/kratos.config.yml"'}),"selfservice:\n default_browser_return_url: https://end-up-here-per-default/\n flows:\n login:\n after:\n # overrides url.default_browser_return_url\n default_browser_return_url: https://this-is-overridden-by-password/\n password:\n # overrides selfservice.login.after.default_browser_return_url\n default_browser_return_url: https://end-up-here-after-login-with-password/\n")),Object(i.b)("h3",{id:"post-registration-redirection"},"Post-Registration Redirection"),Object(i.b)("p",null,"Post-login redirection considers the following configuration keys:"),Object(i.b)("pre",null,Object(i.b)("code",Object(n.a)({parentName:"pre"},{className:"language-yaml",metastring:'file="path/to/my/kratos.config.yml"',file:'"path/to/my/kratos.config.yml"'}),"selfservice:\n default_browser_return_url: https://end-up-here-per-default/\n flows:\n registration:\n after:\n # overrides url.default_browser_return_url\n default_browser_return_url: 
https://this-is-overridden-by-password/\n password:\n # overrides selfservice.registration.after.default_browser_return_url\n default_browser_return_url: https://end-up-here-after-registration-with-password/\n")),Object(i.b)("h3",{id:"post-settings-redirection"},"Post-Settings Redirection"),Object(i.b)("p",null,"Post-settings redirection ",Object(i.b)("strong",{parentName:"p"},"does not use")," the ",Object(i.b)("inlineCode",{parentName:"p"},"urls.default_redirect_to"),"\nconfiguration key. Instead the redirect ends at the same Settings UI with the\nsame Settings Request ID and key ",Object(i.b)("inlineCode",{parentName:"p"},"update_successful")," set to ",Object(i.b)("inlineCode",{parentName:"p"},"true"),". If the\nlisted keys are set, the redirection will end up at the specified values:"),Object(i.b)("pre",null,Object(i.b)("code",Object(n.a)({parentName:"pre"},{className:"language-yaml",metastring:'file="path/to/my/kratos.config.yml"',file:'"path/to/my/kratos.config.yml"'}),"selfservice:\n flows:\n settings:\n after:\n # overrides url.default_redirect_to\n default_redirect_to: https://this-is-overridden-by-password/\n password:\n # overrides selfservice.settings.after.default_redirect_to\n default_redirect_to: https://end-up-here-after-settings-with-password/\n")),Object(i.b)("h2",{id:"json"},"JSON"),Object(i.b)("p",null,"This feature is currently in prototype phase and will be documented at a later\nstage."))}p.isMDXComponent=!0},317:function(e,t,r){"use strict";r.d(t,"a",(function(){return d})),r.d(t,"b",(function(){return b}));var n=r(0),o=r.n(n);function i(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function s(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function a(e){for(var t=1;t<arguments.length;t++){var r=null!=arguments[t]?arguments[t]:{};t%2?s(Object(r),!0).forEach((function(t){i(e,t,r[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(r)):s(Object(r)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(r,t))}))}return e}function l(e,t){if(null==e)return{};var r,n,o=function(e,t){if(null==e)return{};var r,n,o={},i=Object.keys(e);for(n=0;n<i.length;n++)r=i[n],t.indexOf(r)>=0||(o[r]=e[r]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(n=0;n<i.length;n++)r=i[n],t.indexOf(r)>=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(o[r]=e[r])}return o}var c=o.a.createContext({}),p=function(e){var t=o.a.useContext(c),r=t;return e&&(r="function"==typeof e?e(t):a(a({},t),e)),r},d=function(e){var t=p(e.components);return o.a.createElement(c.Provider,{value:t},e.children)},u={inlineCode:"code",wrapper:function(e){var t=e.children;return o.a.createElement(o.a.Fragment,{},t)}},f=o.a.forwardRef((function(e,t){var r=e.components,n=e.mdxType,i=e.originalType,s=e.parentName,c=l(e,["components","mdxType","originalType","parentName"]),d=p(r),f=n,b=d["".concat(s,".").concat(f)]||d[f]||u[f]||i;return r?o.a.createElement(b,a(a({ref:t},c),{},{components:r})):o.a.createElement(b,a({ref:t},c))}));function b(e,t){var r=arguments,n=t&&t.mdxType;if("string"==typeof e||n){var i=r.length,s=new Array(i);s[0]=f;var a={};for(var l in t)hasOwnProperty.call(t,l)&&(a[l]=t[l]);a.originalType=e,a.mdxType="string"==typeof e?e:n,s[1]=a;for(var c=2;c<i;c++)s[c]=r[c];return 
o.a.createElement.apply(null,s)}return o.a.createElement.apply(null,r)}f.displayName="MDXCreateElement"}}]); | p |
user_follow_dao_test.go | package dao
import (
"context"
"flag"
"testing"
logger "github.com/1024casts/snake/pkg/log"
"github.com/1024casts/snake/internal/model"
"github.com/1024casts/snake/pkg/conf"
"github.com/1024casts/snake/pkg/testing/lich"
"github.com/spf13/pflag"
)
var (
d *Dao
cfgFile = pflag.StringP("config", "c", "", "snake config file path.")
cfg *conf.Config
)
func TestMain(m *testing.M) {
pflag.Parse()
*cfgFile = "../../config/config.yaml"
flag.Set("f", "../../test/docker-compose.yaml")
flag.Parse()
cfg, err := conf.Init(*cfgFile)
if err != nil {
panic(err)
}
if err := lich.Setup(); err != nil {
panic(err)
}
defer lich.Teardown()
// init log
logger.Init(&cfg.Logger)
// init db
model.Init(&cfg.MySQL)
d = New(model.GetDB())
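	// Panic rather than os.Exit on failure so the deferred lich.Teardown still runs.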
if code := m.Run(); code != 0 {
panic(code)
}
}
func | (t *testing.T) {
followers, err := d.GetFollowerUserList(context.Background(), 1, 0, 1)
if err != nil {
t.Error(err)
}
if len(followers) > 0 {
t.Log("follower num is: ", len(followers))
}
}
| TestDao_GetFollowerUserList |
rule.py | from ruleta.exceptions import NoActionException
from collections import namedtuple
class Rule(namedtuple("Rule", ["if_", "then_"]) ):
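    """Couple a predicate (if_) with an action (then_): calling the rule runs
    then_ on a match and raises NoActionException otherwise."""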
__slots__ = []
def __call__(self, input_):
        | if self.if_(input_):
return self.then_(input_)
else:
raise NoActionException() |
|
watch.go | package component
import (
"fmt"
"net/url"
"os"
"runtime"
"github.com/pkg/errors"
"github.com/redhat-developer/odo/pkg/log"
appCmd "github.com/redhat-developer/odo/pkg/odo/cli/application"
projectCmd "github.com/redhat-developer/odo/pkg/odo/cli/project"
"github.com/redhat-developer/odo/pkg/odo/util/completion"
ktemplates "k8s.io/kubernetes/pkg/kubectl/cmd/templates"
"github.com/golang/glog"
"github.com/redhat-developer/odo/pkg/odo/genericclioptions"
"github.com/redhat-developer/odo/pkg/component"
odoutil "github.com/redhat-developer/odo/pkg/odo/util"
"github.com/redhat-developer/odo/pkg/util"
"github.com/spf13/cobra"
)
// RecommendedWatchCommandName is the recommended watch command name
const RecommendedWatchCommandName = "watch"
var watchLongDesc = ktemplates.LongDesc(`Watch for changes, update component on change.`)
var watchExample = ktemplates.Examples(` # Watch for changes in directory for current component
%[1]s
# Watch for changes in directory for component called frontend
%[1]s frontend
`)
// WatchOptions contains attributes of the watch command
type WatchOptions struct {
ignores []string
delay int
componentName string
componentSourceType string
watchPath string
*genericclioptions.Context
}
// NewWatchOptions returns new instance of WatchOptions
func NewWatchOptions() *WatchOptions {
return &WatchOptions{}
}
// Complete completes watch args
func (wo *WatchOptions) Complete(name string, cmd *cobra.Command, args []string) (err error) {
wo.Context = genericclioptions.NewContext(cmd)
if len(args) == 0 {
glog.V(4).Info("No component name passed, assuming current component")
wo.componentName = wo.Context.Component()
} else {
wo.componentName = args[0]
}
sourceType, sourcePath, err := component.GetComponentSource(wo.Context.Client, wo.componentName, wo.Context.Application)
if err != nil {
return errors.Wrapf(err, "Unable to get source for %s component.", wo.componentName)
}
u, err := url.Parse(sourcePath)
if err != nil {
return errors.Wrapf(err, "Unable to parse source %s from component %s.", sourcePath, wo.componentName)
}
if u.Scheme != "" && u.Scheme != "file" {
log.Errorf("Component %s has invalid source path %s.", wo.componentName, u.Scheme)
os.Exit(1)
}
wo.watchPath = util.ReadFilePath(u, runtime.GOOS)
wo.componentSourceType = sourceType
if len(wo.ignores) == 0 {
rules, err := util.GetIgnoreRulesFromDirectory(wo.watchPath)
if err != nil {
odoutil.LogErrorAndExit(err, "")
}
wo.ignores = append(wo.ignores, rules...)
}
return
}
// Validate validates the watch parameters
func (wo *WatchOptions) Validate() (err error) {
// Validate component name is non-empty
if wo.componentName == "" {
		return fmt.Errorf(`No component is set as active.
Use 'odo component set <component name>' to set an existing component as active or call this command with the component name as an argument.
`)
}
// Validate component path existence and accessibility permissions for odo
if _, err := os.Stat(wo.watchPath); err != nil {
return errors.Wrapf(err, "Cannot watch %s", wo.watchPath)
}
// Validate source of component is either local source or binary path until git watch is supported
if wo.componentSourceType != "binary" && wo.componentSourceType != "local" {
return fmt.Errorf("Watch is supported by binary and local components only and source type of component %s is %s", wo.componentName, wo.componentSourceType)
}
	// Delay interval cannot be negative
	if wo.delay < 0 {
		return fmt.Errorf("Delay cannot be less than 0; delay=0 means changes will be pushed as soon as they are detected, which can cause performance issues")
}
// Print a debug message warning user if delay is set to 0
if wo.delay == 0 {
glog.V(4).Infof("delay=0 means changes will be pushed as soon as they are detected which can cause performance issues")
}
return
}
// Run has the logic to perform the required actions as part of command
func (wo *WatchOptions) Run() (err error) {
err = component.WatchAndPush(
wo.Context.Client,
os.Stdout,
component.WatchParameters{
ComponentName: wo.componentName,
ApplicationName: wo.Context.Application,
Path: wo.watchPath,
FileIgnores: wo.ignores,
PushDiffDelay: wo.delay,
StartChan: nil,
ExtChan: make(chan bool),
WatchHandler: component.PushLocal,
},
)
if err != nil {
return errors.Wrapf(err, "Error while trying to watch %s", wo.watchPath)
}
return
}
// NewCmdWatch implements the watch odo command
func | (name, fullName string) *cobra.Command {
wo := NewWatchOptions()
var watchCmd = &cobra.Command{
Use: fmt.Sprintf("%s [component name]", name),
Short: "Watch for changes, update component on change",
Long: watchLongDesc,
Example: fmt.Sprintf(watchExample, fullName),
Args: cobra.MaximumNArgs(1),
Run: func(cmd *cobra.Command, args []string) {
odoutil.LogErrorAndExit(wo.Complete(name, cmd, args), "")
odoutil.LogErrorAndExit(wo.Validate(), "")
odoutil.LogErrorAndExit(wo.Run(), "")
},
}
watchCmd.Flags().StringSliceVar(&wo.ignores, "ignore", []string{}, "Files or folders to be ignored via glob expressions.")
	watchCmd.Flags().IntVar(&wo.delay, "delay", 1, "Time in seconds between a detection of code change and push. delay=0 means changes will be pushed as soon as they are detected, which can cause performance issues")
// Add a defined annotation in order to appear in the help menu
watchCmd.Annotations = map[string]string{"command": "component"}
watchCmd.SetUsageTemplate(odoutil.CmdUsageTemplate)
//Adding `--application` flag
appCmd.AddApplicationFlag(watchCmd)
//Adding `--project` flag
projectCmd.AddProjectFlag(watchCmd)
completion.RegisterCommandHandler(watchCmd, completion.ComponentNameCompletionHandler)
return watchCmd
}
| NewCmdWatch |
forms.py | from django import forms
from django.forms import ModelForm, Form
from census_paleo.models import occurrence, taxonomy, measured_values, specimen
from ajax_select import make_ajax_field
class OccurrenceForm(ModelForm):
taxon = make_ajax_field(occurrence, "taxon", "taxonLookup")
ref = make_ajax_field(occurrence, "ref", "referenceLookup")
location = make_ajax_field(occurrence, "location", "locationLookup")
notes = forms.CharField(widget=forms.Textarea, required=False)
issue = forms.BooleanField(required=False, initial=False)
class Meta:
model = occurrence | reference = make_ajax_field(measured_values, "reference", "referenceLookup")
class Meta:
model = measured_values
fields = "__all__"
class TaxonForm(ModelForm):
class Meta:
model = taxonomy
fields = "__all__"
class GetTaxonInfoForm(Form):
taxon = make_ajax_field(occurrence, "taxon", "taxonLookup")
class CSVUploadForm(Form):
csvFile=forms.FileField(required=True, label="CSV file")
#taxonomyColumnName = forms.CharField(required=True,label="Taxonomy Column Name", initial='taxon')
#referenceColumnName = forms.CharField(required=True, label="Reference Column Name", initial='ref')
#locationColumnName = forms.CharField(required=True, label='Location Column Name', initial='location')
#presenceAbsenceData = forms.BooleanField(required=False, label="Presence Absence Data") | fields = "__all__"
class MeasuredValueForm(ModelForm):
specimen = make_ajax_field(measured_values, "specimen", "specimenLookup") |
service_watch.go | package k8swatch
import (
"context"
"sync"
"github.com/pkg/errors"
v1 "k8s.io/api/core/v1"
"k8s.io/apimachinery/pkg/types"
"github.com/windmilleng/tilt/internal/k8s"
"github.com/windmilleng/tilt/internal/store"
"github.com/windmilleng/tilt/pkg/logger"
"github.com/windmilleng/tilt/pkg/model"
)
type ServiceWatcher struct {
kCli k8s.Client
ownerFetcher k8s.OwnerFetcher
watching bool
nodeIP k8s.NodeIP
mu sync.RWMutex
knownDeployedUIDs map[types.UID]model.ManifestName
knownServices map[types.UID]*v1.Service
}
func NewServiceWatcher(kCli k8s.Client, ownerFetcher k8s.OwnerFetcher, nodeIP k8s.NodeIP) *ServiceWatcher {
return &ServiceWatcher{
kCli: kCli,
ownerFetcher: ownerFetcher,
nodeIP: nodeIP,
knownDeployedUIDs: make(map[types.UID]model.ManifestName),
knownServices: make(map[types.UID]*v1.Service),
}
}
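// diff computes the watcher task list from the current store state; once the
// watch is already running, needsWatch is suppressed.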
func (w *ServiceWatcher) diff(st store.RStore) watcherTaskList {
state := st.RLockState()
defer st.RUnlockState()
w.mu.RLock()
defer w.mu.RUnlock()
taskList := createWatcherTaskList(state, w.knownDeployedUIDs)
if w.watching {
taskList.needsWatch = false
}
return taskList
}
func (w *ServiceWatcher) OnChange(ctx context.Context, st store.RStore) {
taskList := w.diff(st)
if taskList.needsWatch {
w.setupWatch(ctx, st)
}
if len(taskList.newUIDs) > 0 {
w.setupNewUIDs(ctx, st, taskList.newUIDs)
} | w.watching = true
ch, err := w.kCli.WatchServices(ctx, k8s.ManagedByTiltSelector())
if err != nil {
err = errors.Wrap(err, "Error watching services. Are you connected to kubernetes?\n")
st.Dispatch(store.NewErrorAction(err))
return
}
go w.dispatchServiceChangesLoop(ctx, ch, st)
}
// When new UIDs are deployed, go through all our known services and dispatch
// new events. This handles the case where we get the Service change event
// before the deploy id shows up in the manifest, which is way more common than
// you would think.
func (w *ServiceWatcher) setupNewUIDs(ctx context.Context, st store.RStore, newUIDs map[types.UID]model.ManifestName) {
w.mu.Lock()
defer w.mu.Unlock()
for uid, mn := range newUIDs {
w.knownDeployedUIDs[uid] = mn
service, ok := w.knownServices[uid]
if !ok {
continue
}
err := DispatchServiceChange(st, service, mn, w.nodeIP)
if err != nil {
logger.Get(ctx).Infof("error resolving service url %s: %v", service.Name, err)
}
}
}
// Record the service update, and return true if this is newer than
// the state we already know about.
func (w *ServiceWatcher) recordServiceUpdate(service *v1.Service) bool {
w.mu.Lock()
defer w.mu.Unlock()
uid := service.UID
oldService, ok := w.knownServices[uid]
// In "real" code, if we get two service updates with the same resource version,
// we can safely ignore the new one. But dispatching a spurious event
// in this case makes testing much easier, because the test harness doesn't need
// to keep track of ResourceVersions
olderThanKnown := ok && oldService.ResourceVersion > service.ResourceVersion
if olderThanKnown {
return false
}
w.knownServices[uid] = service
return true
}
// Match up the service update to a manifest.
//
// The division between triageServiceUpdate and recordServiceUpdate is a bit artificial,
// but is designed this way to be consistent with PodWatcher and EventWatchManager.
func (w *ServiceWatcher) triageServiceUpdate(service *v1.Service) model.ManifestName {
w.mu.Lock()
defer w.mu.Unlock()
uid := service.UID
manifestName, ok := w.knownDeployedUIDs[uid]
if !ok {
return ""
}
return manifestName
}
func (w *ServiceWatcher) dispatchServiceChangesLoop(ctx context.Context, ch <-chan *v1.Service, st store.RStore) {
for {
select {
case service, ok := <-ch:
if !ok {
return
}
ok = w.recordServiceUpdate(service)
if !ok {
continue
}
manifestName := w.triageServiceUpdate(service)
if manifestName == "" {
continue
}
err := DispatchServiceChange(st, service, manifestName, w.nodeIP)
if err != nil {
logger.Get(ctx).Infof("error resolving service url %s: %v", service.Name, err)
}
case <-ctx.Done():
return
}
}
}
func DispatchServiceChange(st store.RStore, service *v1.Service, mn model.ManifestName, ip k8s.NodeIP) error {
url, err := k8s.ServiceURL(service, ip)
if err != nil {
return err
}
st.Dispatch(NewServiceChangeAction(service, mn, url))
return nil
} | }
func (w *ServiceWatcher) setupWatch(ctx context.Context, st store.RStore) { |
CodeRow.tsx | // Copyright 2017-2020 @polkadot/react-components authors & contributors
// This software may be modified and distributed under the terms
// of the Apache-2.0 license. See the LICENSE file for details.
/* eslint-disable camelcase */
import { I18nProps } from '@polkadot/react-components/types';
import { CodeStored } from '@polkadot/app-contracts/types';
import React from 'react';
import styled from 'styled-components';
import { createType } from '@polkadot/types';
import { registry } from '@polkadot/react-api';
import { withMulti } from '@polkadot/react-api/hoc';
import { classes, toShortAddress } from '@polkadot/react-components/util';
import Row, { RowProps, RowState, styles } from '@polkadot/react-components/Row';
import { CopyButton, Icon } from '@polkadot/react-components';
import contracts from './store';
import Messages from './Messages';
import translate from './translate';
interface Props extends I18nProps, RowProps {
code: CodeStored;
withMessages?: boolean;
}
interface State extends RowState {
codeHash: string;
}
const DEFAULT_HASH = '0x';
const DEFAULT_NAME = '<unknown>';
const CodeIcon = styled.div`
& {
margin-right: 1em;
background: #eee;
color: #666;
width: 4rem;
height: 5rem;
padding: 0.5rem;
display: flex;
justify-content: flex-end;
align-items: flex-end;
}
`;
const DEFAULT_ADDR = '5'.padEnd(16, 'x');
class CodeRow extends Row<Props, State> {
public state: State;
constructor (props: Props) {
super(props);
this.state = this.createState();
}
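  // Derives codeHash/name/address/tags from props; returns null when nothing changed.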
public static getDerivedStateFromProps ({ accountsInfo, code: { json } }: Props, prevState: State): State | null {
const codeHash = json.codeHash || DEFAULT_HASH;
const name = json.name || DEFAULT_NAME;
const tags = json.tags || [];
const { accountId } = accountsInfo || {};
const address = accountId
? accountId.toString()
: DEFAULT_ADDR;
const state: Partial<State> = { tags };
let hasChanged = false;
if (codeHash !== prevState.codeHash) {
state.codeHash = codeHash;
hasChanged = true;
}
if (!prevState.isEditingName && name !== prevState.name) {
state.name = name;
hasChanged = true;
}
if (address !== prevState.address) {
state.address = address;
hasChanged = true;
}
return hasChanged
? state as State | }
public render (): React.ReactNode {
const { className = '', isInline } = this.props;
return (
<div className={classes('ui--Row', isInline && 'inline', className)}>
<div className='ui--Row-base'>
{this.renderIcon()}
<div className='ui--Row-details'>
{this.renderName()}
{this.renderCodeHash()}
{this.renderTags()}
</div>
{this.renderButtons()}
</div>
{this.renderMessages()}
{this.renderChildren()}
</div>
);
}
private createState (): State {
const { code: { json: { codeHash = DEFAULT_HASH, name = DEFAULT_NAME, tags = [] } } } = this.props;
return {
address: DEFAULT_ADDR,
codeHash,
isEditingName: false,
isEditingTags: false,
name,
tags
};
}
protected renderCodeHash (): React.ReactNode {
    const { codeHash, name } = this.state;
return (
<>
<div className='ui--Row-name'>
{name}
</div>
<div className='ui--Row-accountId'>
<CopyButton
isAddress
value={codeHash}
>
<span>{toShortAddress(codeHash)}</span>
</CopyButton>
</div>
</>
);
}
protected renderButtons (): React.ReactNode {
const { buttons } = this.props;
if (!buttons) {
return null;
}
return (
<div className='ui--Row-buttons'>
{buttons}
</div>
);
}
protected renderIcon (): React.ReactNode {
return (
<CodeIcon>
<Icon
name='code'
size='large'
/>
</CodeIcon>
);
}
protected renderMessages (): React.ReactNode {
const { code: { contractAbi }, withMessages } = this.props;
if (!withMessages || !contractAbi) {
return null;
}
return (
<Messages
contractAbi={contractAbi}
isRemovable
/>
);
}
protected saveName = async (): Promise<void> => {
const { codeHash, name } = this.state;
const trimmedName = name.trim();
    // Only save if the trimmed name is not empty.
    if (trimmedName && codeHash) {
      await contracts.saveCode(createType(registry, 'Hash', codeHash), { name: trimmedName });
this.setState({ isEditingName: false });
}
}
protected saveTags = async (): Promise<void> => {
const { codeHash, tags } = this.state;
if (codeHash) {
await contracts.saveCode(createType(registry, 'Hash', codeHash), { tags });
this.setState({ isEditingTags: false });
}
}
}
export default withMulti(
styled(CodeRow as React.ComponentClass<Props>)`
${styles}
`,
translate
); | : null; |
config.js | // Use this file to change prototype configuration.
// Note: prototype config can be overridden using environment variables (eg on heroku)
module.exports = {
// Service name used in header. Eg: 'Renew your passport'
serviceName: 'Service name goes here',
// Default port that prototype runs on
port: '3000',
// Enable or disable password protection on production
useAuth: 'false',
// Automatically stores form data, and send to all views
useAutoStoreData: 'true',
// Enable or disable built-in docs and examples.
useDocumentation: 'true',
// Force HTTP to redirect to HTTPs on production
useHttps: 'true',
// Cookie warning - update link to service's cookie page.
cookieText: 'GOV.UK uses cookies to make the site simpler. <a href="#">Find out more about cookies</a>', |
// Enable or disable Browser Sync
useBrowserSync: 'true'
} | |
0.0.1-admins.js | /**
* This script automatically creates a default Admin user when an
* empty database is used for the first time. You can use this
* technique to insert data into any List you have defined.
*/ | ]
}; |
exports.create = {
User: [
{ 'name.first': 'Admin', 'name.last': 'User', email: '[email protected]', password: 'admin', isAdmin: true } |
box1.rs | // box1.rs
//
// At compile time, Rust needs to know how much space a type takes up. This becomes problematic
// for recursive types, where a value can have as part of itself another value of the same type.
// To get around the issue, we can use a `Box` - a smart pointer used to store data on the heap,
// which also allows us to wrap a recursive type.
//
// The recursive type we're implementing in this exercise is the `cons list` - a data structure
// frequently found in functional programming languages. Each item in a cons list contains two
// elements: the value of the current item and the next item. The last item is a value called `Nil`.
//
// Step 1: use a `Box` in the enum definition to make the code compile
// Step 2: create both empty and non-empty cons lists by replacing `unimplemented!()`
//
// Note: the tests should not be changed
//
// Execute `rustlings hint box1` for hints :)
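// Box<List> has a fixed size (a heap pointer), which gives the compiler a known
// size for the otherwise-recursive List type.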
#[derive(PartialEq, Debug)]
pub enum List {
Cons(i32, Box<List>),
Nil,
}
fn main() {
println!("This is an empty cons list: {:?}", create_empty_list());
println!(
"This is a non-empty cons list: {:?}",
create_non_empty_list()
);
}
pub fn | () -> List {
List::Nil
}
pub fn create_non_empty_list() -> List {
List::Cons(3, Box::new(List::Cons(5, Box::new(List::Nil))))
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_create_empty_list() {
assert_eq!(List::Nil, create_empty_list())
}
#[test]
fn test_create_non_empty_list() {
assert_ne!(create_empty_list(), create_non_empty_list())
}
}
| create_empty_list |
facade.py | from decouple import config
from wordpress import API
# WooCommerce settings
consumer_key = config("WC_CK", False)
consumer_secret = config("WC_CS", False)
woo_commerce_url = config("WC_URL", False)
wpapi = API(
url=woo_commerce_url,
api="wp-json",
version='wc/v3',
consumer_key=consumer_key,
consumer_secret=consumer_secret,
timeout=10
)
def ge | :
orders = wpapi.get("orders")
return orders.json()
| t_orders() |
iterators.py | # Copyright (C) 2001-2006 Python Software Foundation
# Author: Barry Warsaw
# Contact: [email protected]
"""Various types of useful iterators and generators."""
__all__ = [
'body_line_iterator',
'typed_subpart_iterator',
'walk',
# Do not include _structure() since it's part of the debugging API.
]
import sys
from cStringIO import StringIO
# This function will become a method of the Message class
def walk(self):
"""Walk over the message tree, yielding each subpart.
The walk is performed in depth-first order. This method is a
generator.
"""
yield self
if self.is_multipart():
for subpart in self.get_payload():
for subsubpart in subpart.walk():
yield subsubpart
# These two functions are imported into the Iterators.py interface module.
def | (msg, decode=False):
"""Iterate over the parts, returning string payloads line-by-line.
Optional decode (default False) is passed through to .get_payload().
"""
for subpart in msg.walk():
payload = subpart.get_payload(decode=decode)
if isinstance(payload, basestring):
for line in StringIO(payload):
yield line
def typed_subpart_iterator(msg, maintype='text', subtype=None):
"""Iterate over the subparts with a given MIME type.
Use `maintype' as the main MIME type to match against; this defaults to
"text". Optional `subtype' is the MIME subtype to match against; if
omitted, only the main type is matched.
"""
for subpart in msg.walk():
if subpart.get_content_maintype() == maintype:
if subtype is None or subpart.get_content_subtype() == subtype:
yield subpart
def _structure(msg, fp=None, level=0, include_default=False):
"""A handy debugging aid"""
if fp is None:
fp = sys.stdout
tab = ' ' * (level * 4)
print >> fp, tab + msg.get_content_type(),
if include_default:
print >> fp, '[%s]' % msg.get_default_type()
else:
print >> fp
if msg.is_multipart():
for subpart in msg.get_payload():
_structure(subpart, fp, level+1, include_default)
| body_line_iterator |
update.go | package handlers
import (
"encoding/json"
"fmt"
"io/ioutil"
"log"
"net/http"
"time"
"github.com/openfaas/faas/gateway/requests"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/client-go/kubernetes"
)
// MakeUpdateHandler update specified function
func MakeUpdateHandler(functionNamespace string, clientset *kubernetes.Clientset) http.HandlerFunc | {
return func(w http.ResponseWriter, r *http.Request) {
defer r.Body.Close()
body, _ := ioutil.ReadAll(r.Body)
request := requests.CreateFunctionRequest{}
err := json.Unmarshal(body, &request)
if err != nil {
w.WriteHeader(http.StatusBadRequest)
return
}
getOpts := metav1.GetOptions{}
deployment, findDeployErr := clientset.ExtensionsV1beta1().
Deployments(functionNamespace).
Get(request.Service, getOpts)
if findDeployErr != nil {
w.WriteHeader(http.StatusNotFound)
w.Write([]byte(findDeployErr.Error()))
return
}
if len(deployment.Spec.Template.Spec.Containers) > 0 {
deployment.Spec.Template.Spec.Containers[0].Image = request.Image
			// Leave imagePullPolicy untouched on update: it is now configurable, so forcing
			// PullAlways here could unexpectedly mutate deployed functions. This could be
			// reconsidered later, but would need to read the configured value.
//deployment.Spec.Template.Spec.Containers[0].ImagePullPolicy = v1.PullAlways
deployment.Spec.Template.Spec.Containers[0].Env = buildEnvVars(&request)
deployment.Spec.Template.Spec.NodeSelector = createSelector(request.Constraints)
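			// A fresh nanosecond-based uid label changes the pod template on every update,
			// forcing the Deployment to roll out even when the image tag is unchanged.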
labels := map[string]string{
"faas_function": request.Service,
"uid": fmt.Sprintf("%d", time.Now().Nanosecond()),
}
if request.Labels != nil {
if min := getMinReplicaCount(*request.Labels); min != nil {
deployment.Spec.Replicas = min
}
for k, v := range *request.Labels {
labels[k] = v
}
}
deployment.Labels = labels
deployment.Spec.Template.ObjectMeta.Labels = labels
resources, resourceErr := createResources(request)
if resourceErr != nil {
w.WriteHeader(http.StatusBadRequest)
w.Write([]byte(resourceErr.Error()))
return
}
deployment.Spec.Template.Spec.Containers[0].Resources = *resources
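			// Fetch the secrets named in the request and reattach them to the deployment.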
existingSecrets, err := getSecrets(clientset, functionNamespace, request.Secrets)
if err != nil {
w.WriteHeader(http.StatusBadRequest)
w.Write([]byte(err.Error()))
return
}
err = UpdateSecrets(request, deployment, existingSecrets)
if err != nil {
log.Println(err)
w.WriteHeader(http.StatusBadRequest)
w.Write([]byte(err.Error()))
return
}
}
if _, updateErr := clientset.ExtensionsV1beta1().
Deployments(functionNamespace).
Update(deployment); updateErr != nil {
w.WriteHeader(http.StatusInternalServerError)
w.Write([]byte(updateErr.Error()))
}
}
} |
|
kubernetes.go | package platform
import (
"errors"
"fmt"
"github.com/fatih/color"
"github.com/mitchellh/go-homedir"
"io/ioutil"
"k8s.io/apimachinery/pkg/runtime"
"os"
"regexp"
"strings"
"github.com/AlexsJones/gravitywell/configuration"
"github.com/AlexsJones/gravitywell/state"
logger "github.com/sirupsen/logrus"
appsv1 "k8s.io/api/apps/v1"
"k8s.io/api/apps/v1beta1"
"k8s.io/api/apps/v1beta2"
autoscalingv1 "k8s.io/api/autoscaling/v1"
batchv1 "k8s.io/api/batch/v1"
batchbeta1 "k8s.io/api/batch/v1beta1"
"k8s.io/api/core/v1"
v1betav1 "k8s.io/api/extensions/v1beta1"
v1polbeta "k8s.io/api/policy/v1beta1"
v1rbac "k8s.io/api/rbac/v1"
storagev1 "k8s.io/api/storage/v1"
storagev1b1 "k8s.io/api/storage/v1beta1"
"k8s.io/client-go/kubernetes"
"k8s.io/client-go/kubernetes/scheme"
_ "k8s.io/client-go/plugin/pkg/client/auth/gcp"
"k8s.io/client-go/rest"
"k8s.io/client-go/tools/clientcmd"
	// The blank auth/gcp import above is required for GCP auth provider support.
)
// GetKubeClient creates a Kubernetes config and client for a given kubeconfig context.
func GetKubeClient(context string) (*rest.Config, kubernetes.Interface, error) {
config, err := configForContext(context)
if err != nil {
return nil, nil, err
}
client, err := kubernetes.NewForConfig(config)
if err != nil {
return nil, nil, fmt.Errorf("could not get Kubernetes client: %s", err)
}
return config, client, nil
}
// configForContext creates a Kubernetes REST client configuration for a given kubeconfig context.
func configForContext(context string) (*rest.Config, error) {
config, err := getConfig(context).ClientConfig()
if err != nil {
return nil, fmt.Errorf("could not get Kubernetes config for context %q: %s", context, err)
}
return config, nil
}
func findKubeConfig() (string, error) {
env := os.Getenv("KUBECONFIG")
if env != "" {
return env, nil
}
path, err := homedir.Expand("~/.kube/config")
if err != nil {
return "", err
}
return path, nil
}
// getConfig returns a Kubernetes client config for a given context.
func getConfig(c string) clientcmd.ClientConfig {
rules := clientcmd.NewDefaultClientConfigLoadingRules()
rules.DefaultClientConfig = &clientcmd.DefaultClientConfig
overrides := &clientcmd.ConfigOverrides{ClusterDefaults: clientcmd.ClusterDefaults}
kubeConfigPath, err := findKubeConfig()
if err != nil {
logger.Fatal(err)
}
kubeConfig, err := clientcmd.LoadFromFile(kubeConfigPath)
if err != nil {
logger.Fatal(err)
}
if c != "" {
		for mapContext := range kubeConfig.Contexts {
if strings.Contains(mapContext, c) {
overrides.CurrentContext = mapContext
continue
}
}
	} | // deleteEmpty filters zero-length strings out of a slice.
func deleteEmpty(s []string) []string {
var r []string
for _, str := range s {
if str != "" {
r = append(r, str)
}
}
return r
}
//GenerateDeploymentPlan
func GenerateDeploymentPlan(k kubernetes.Interface,
files []string, namespace string, opts configuration.Options,
commandFlag configuration.CommandFlag, shouldAwaitDeployment bool) error {
var kubernetesResources []runtime.Object
for _, file := range files {
f, err := os.Open(file)
if err != nil {
logger.Fatalf(fmt.Sprintf("Could not open from file %s", file))
}
raw, err := ioutil.ReadAll(f)
if err != nil {
logger.Fatalf(fmt.Sprintf("Could not read from file %s", file))
}
		yamlDelimiter := regexp.MustCompile(`(\A|\n)---`)
		documents := deleteEmpty(yamlDelimiter.Split(string(raw), -1))
for _, doc := range documents {
if doc == "" {
continue
}
//Decode into kubernetes object
decode := scheme.Codecs.UniversalDeserializer().Decode
obj, kind, err := decode([]byte(doc), nil, nil)
if err != nil {
logger.Fatalf(fmt.Sprintf("%s : Could not be decoded : %s", file, err.Error()))
}
logger.Infof("Decoded Kind: %s", kind.String())
kubernetesResources = append(kubernetesResources, obj)
}
}
//TODO: Deployment plan printing
if len(kubernetesResources) == 0 {
return errors.New("no resources within file list")
}
//Run namespace first
out := 0
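	// First pass: apply namespaces immediately and compact the remaining
	// resources to the front of the slice for the second pass.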
for _, resource := range kubernetesResources {
gvk := resource.GetObjectKind().GroupVersionKind()
switch strings.ToLower(gvk.Kind) {
case "namespace":
//Remove the namespace from the array and run first
_, err := execV1NamespaceResource(k, resource.(*v1.Namespace), namespace, opts, commandFlag)
if err != nil {
logger.Fatalf(err.Error())
}
default:
kubernetesResources[out] = resource
out++
}
}
kubernetesResources = kubernetesResources[:out]
//Run all other resources
for _, resource := range kubernetesResources {
s, err := DeployFromObject(k, resource, namespace, opts, commandFlag, shouldAwaitDeployment)
if err != nil {
logger.Fatalf(fmt.Sprintf("%s : %s", err.Error(), resource.GetObjectKind().GroupVersionKind().Kind))
}
switch s {
case state.EDeploymentStateError:
color.Red(fmt.Sprintf("%s STATE: %s",
resource.GetObjectKind().GroupVersionKind().Kind, state.Translate(s)))
os.Exit(1)
default:
color.Green(fmt.Sprintf("%s STATE: %s",
resource.GetObjectKind().GroupVersionKind().Kind, state.Translate(s)))
}
}
return nil
}
//DeployFromObject ...
func DeployFromObject(k kubernetes.Interface, obj runtime.Object,
namespace string, opts configuration.Options,
commandFlag configuration.CommandFlag, shouldAwaitDeployment bool) (state.State, error) {
var response state.State
var e error
switch obj.(type) {
case *v1.Pod:
response, e = execV1PodResource(k, obj.(*v1.Pod), namespace, opts, commandFlag)
case *v1.PersistentVolume:
response, e = execV1PersistentVolumeResource(k, obj.(*v1.PersistentVolume), namespace, opts, commandFlag)
case *batchbeta1.CronJob:
response, e = execV1Beta1CronJob(k, obj.(*batchbeta1.CronJob), namespace, opts, commandFlag)
case *batchv1.Job:
response, e = execV1Job(k, obj.(*batchv1.Job), namespace, opts, commandFlag)
case *storagev1.StorageClass:
response, e = execV1StorageResource(k, obj.(*storagev1.StorageClass), namespace, opts, commandFlag)
case *v1betav1.Deployment:
response, e = execV1BetaDeploymentResource(k, obj.(*v1betav1.Deployment),
namespace, opts, commandFlag, shouldAwaitDeployment)
case *v1beta1.Deployment:
response, e = execV1Beta1DeploymentResource(k, obj.(*v1beta1.Deployment),
namespace, opts, commandFlag, shouldAwaitDeployment)
case *v1beta2.Deployment:
response, e = execV2BetaDeploymentResource(k, obj.(*v1beta2.Deployment),
namespace, opts, commandFlag, shouldAwaitDeployment)
case *v1beta1.StatefulSet:
response, e = execV1Beta1StatefulSetResource(k, obj.(*v1beta1.StatefulSet), namespace, opts, commandFlag, shouldAwaitDeployment)
case *appsv1.StatefulSet:
response, e = execV1StatefulSetResource(k, obj.(*appsv1.StatefulSet),
namespace, opts, commandFlag, shouldAwaitDeployment)
case *v1.Secret:
response, e = execV1SecretResource(k, obj.(*v1.Secret), namespace, opts, commandFlag)
case *v1.Service:
response, e = execV1ServiceResouce(k, obj.(*v1.Service), namespace, opts, commandFlag)
case *v1.ConfigMap:
response, e = execV1ConfigMapResource(k, obj.(*v1.ConfigMap), namespace, opts, commandFlag)
case *v1polbeta.PodDisruptionBudget:
response, e = execV1Beta1PodDisruptionBudgetResouce(k, obj.(*v1polbeta.PodDisruptionBudget), namespace, opts, commandFlag)
case *v1.ServiceAccount:
response, e = execV1ServiceAccountResource(k, obj.(*v1.ServiceAccount), namespace, opts, commandFlag)
//V1 RBAC
case *v1rbac.ClusterRoleBinding:
response, e = execV1RbacClusterRoleBindingResouce(k, obj.(*v1rbac.ClusterRoleBinding), namespace, opts, commandFlag)
case *v1rbac.Role:
response, e = execV1RbacRoleResouce(k, obj.(*v1rbac.Role), namespace, opts, commandFlag)
case *v1rbac.RoleBinding:
response, e = execV1RbacRoleBindingResouce(k, obj.(*v1rbac.RoleBinding), namespace, opts, commandFlag)
case *v1rbac.ClusterRole:
response, e = execV1AuthClusterRoleResouce(k, obj.(*v1rbac.ClusterRole), namespace, opts, commandFlag)
case *v1betav1.DaemonSet:
response, e = execV1DaemonSetResource(k, obj.(*v1betav1.DaemonSet), namespace, opts, commandFlag, shouldAwaitDeployment)
case *v1betav1.Ingress:
response, e = execV1Beta1IngressResouce(k, obj.(*v1betav1.Ingress), namespace, opts, commandFlag)
case *storagev1b1.StorageClass:
response, e = execV1Beta1StorageResouce(k, obj.(*storagev1b1.StorageClass), namespace, opts, commandFlag)
case *autoscalingv1.HorizontalPodAutoscaler:
response, e = execV1HorizontalPodAutoscaler(k, obj.(*autoscalingv1.HorizontalPodAutoscaler), namespace, opts, commandFlag)
default:
logger.Error("Unable to convert API resource:", obj.GetObjectKind().GroupVersionKind())
}
return response, e
} | return clientcmd.NewNonInteractiveDeferredLoadingClientConfig(rules, overrides)
}
|
mail.rs | use crate::app_state::AppState;
use crate::dev::*;
use actix::prelude::*;
use actix_web::web;
use jsonwebtoken::{encode, Algorithm, EncodingKey, Header};
use lettre::message::header;
use lettre::message::Mailbox;
use lettre::transport::smtp::authentication::Credentials;
use lettre::{SmtpTransport, Transport};
use serde::{Deserialize, Serialize};
use serde_json::json;
use std::time::SystemTime;
pub struct Mail {
smtp: SmtpTransport,
from: Mailbox,
secret: String,
host: String,
app_state: Option<web::Data<AppState>>,
}
impl Actor for Mail {
type Context = Context<Self>;
}
#[derive(Message)]
#[rtype(result = "()")]
pub struct SetAppState(pub web::Data<AppState>);
impl Handler<SetAppState> for Mail {
type Result = ();
fn handle(&mut self, msg: SetAppState, _: &mut Self::Context) -> Self::Result {
self.app_state = Some(msg.0);
}
}
#[derive(Debug, Clone, Message, Serialize, Deserialize)]
#[rtype(result = "Result<()>")]
pub struct SendVerification {
pub email: String,
pub user_id: String,
pub token: String,
pub exp: usize,
}
impl Handler<SendVerification> for Mail {
type Result = Result<()>;
fn handle(&mut self, msg: SendVerification, _: &mut Self::Context) -> Self::Result {
if let Some(app_state) = &self.app_state {
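            // Sign the verification payload as an HS256 JWT so the emailed link can be validated later.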
let token = encode(
&Header::new(Algorithm::HS256),
&msg,
&EncodingKey::from_secret(self.secret.as_ref()),
)?;
let body = app_state
.render(
"mail.hbs",
&json!({
"host": self.host,
"token": token,
"user_id": msg.user_id,
"expire": SystemTime::now() + TOKEN_VALID_DURATION
}),
)
.unwrap();
let msg = lettre::Message::builder()
.from(self.from.clone())
.to(msg.email.parse().unwrap())
.subject("Finish your registration to Web Mighty")
.header(header::ContentType::TEXT_HTML)
.body(body)
.unwrap();
self.smtp.send(&msg)?;
Ok(())
} else {
bail!("mail is not initialized");
}
}
}
impl Mail {
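    /// Builds the SMTP relay transport; panics if the host or from-address fails to parse.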
pub fn new(
from: String,
username: String,
password: String,
host: String,
server_host: String,
secret: String,
) -> Mail {
let cred = Credentials::new(username, password);
Mail {
smtp: SmtpTransport::relay(&*host).unwrap().credentials(cred).build(),
from: from.parse().unwrap(),
secret,
host: server_host,
app_state: None,
}
} | } |
|
test_preferences.py | import datetime
import pytest
from quickbooks import QuickBooks
from quickbooks.objects.preferences import Preferences
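# Validate that a string is an ISO date (YYYY-MM-DD); raises ValueError otherwise.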
def check_valid_date(date_text):
try:
datetime.datetime.strptime(date_text, '%Y-%m-%d')
return True
except ValueError:
raise ValueError("Incorrect data format, should be YYYY-MM-DD")
def test_unicode():
preferences = Preferences()
assert str(preferences) == "Preferences"
def test_valid_date():
preferences = Preferences()
preferences.BookDateClosed = "2022-04-07"
    assert check_valid_date(preferences.BookDateClosed)
with pytest.raises(ValueError):
check_valid_date("2022-04-07 10:46:32 AM")
def test_valid_object_name():
| obj = Preferences()
client = QuickBooks()
result = client.isvalid_object_name(obj.qbo_object_name)
    assert result is True
|
Botconnection.py | """
Author : Robin Phoeng
Date : 24/06/2018
"""
import discord
from discord.ext.commands import Bot
from discord.ext import commands
import asyncio
import time
import random
from Game import Game
import DiscordUtility
import BarFactory
from Bar import Box
Client = discord.Client()
bot = commands.Bot(command_prefix="!")
playerRole = None
GMRole = None
game_object = Game()
@bot.event
async def on_ready():
# will print to python console
print("Bot is ready!")
"""
####################################################################
Utility Functions
####################################################################
"""
@bot.command(pass_context=True)
async def hello(context):
await bot.say("<@%s> hello" % context.message.author.id)
@bot.command(pass_context=False)
async def cookie():
await bot.say(":cookie:")
@bot.command(pass_context=False)
async def roll(*list):
"""
    Handles a Fate roll with optional advantage modifiers.
    :param list: modifiers to add to the roll total
"""
advantages = []
for i in list:
try:
advantages.append(int(i))
except ValueError:
await bot.say("%s is not a valid integer" % i)
return
[d1,d2,d3,d4,t] = DiscordUtility.fudgeRoll()
output = "```\nroll : %d + %d + %d + %d = %d" % (d1,d2,d3,d4,t)
output += "\ntotal : %d" % t # add total to new line
for i in advantages:
output += " + %d" % i
t += i
# add total at the end
output += " = %d\n```" % t
await bot.say(output)
"""
####################################################################
Discord Set up
####################################################################
"""
@bot.command(pass_context=True)
async def assign(context,role_arg,id):
if context.message.server.owner != context.message.author:
await not_owner_message()
return
    role = role_arg.lower()  # case-insensitive
if role in ["p", "player", "gm", "gamemaster"]:
role_id = DiscordUtility.valid_role(id)
if role_id is None:
await bot.say("not a valid role")
return
if role in ["p","player"]:
global playerRole
playerRole = role_id
await bot.say("player role set")
else:
global GMRole
GMRole = role_id
await bot.say("GM role set")
else:
await bot.say("assign role of(p)layer or gamemaster(gm)")
@bot.command(pass_context=True)
async def load(context,action,*args):
if context.message.server.owner != context.message.author:
await not_owner_message()
return
a = action.lower()
if a in ["roles", "r"]:
with open("Game/roles.txt","r") as text:
roles = text.read().split("\n")
global playerRole
global GMRole
playerRole = roles[0]
GMRole = roles[1]
await bot.say("roles loaded")
@bot.command(pass_context=True)
async def save(context,action, *args):
if context.message.server.owner != context.message.author:
await not_owner_message()
return
a = action.lower()
if a in ["roles","r"]:
with open("Game/roles.txt","w") as roles:
global playerRole
global GMRole
roles.write(playerRole + "\n")
roles.write(GMRole)
await bot.say("roles saved")
"""
####################################################################
Game Set up
####################################################################
"""
@bot.command(pass_context=True)
async def game(context,arg):
roles = list(role.id for role in context.message.author.roles) # get role ids
if DiscordUtility.is_role(GMRole,roles):
if arg == "start":
await bot.say("Start Game")
if arg == "refresh":
game_object.refresh()
await bot.say("refresh Game")
if arg == "details":
await bot.say(str(game_object))
else:
await not_gm_message()
@bot.command(pass_context=True)
async def c(context, a1, a2=None):
await new_character(context,a1,a2)
@bot.command(pass_context=True)
async def character(context,a1,a2=None):
await new_character(context,a1,a2)
async def new_character(context,a1, a2=None):
"""
make a new character
:param context: context
:param a1: id, could also be name
:param a2: name
"""
roles = list(role.id for role in context.message.author.roles) # get role ids
player = DiscordUtility.is_role(playerRole, roles)
gm = DiscordUtility.is_role(GMRole, roles)
if not player and not gm:
return None
if gm:
game_id = DiscordUtility.valid_id(a1)
if game_id is None: # not a valid mention, test for name
await bot.say("no user targeted")
return
else:
game_id = context.message.author.id
if gm:
name = a2
else:
name = a1
if game_object.player_link_character(name, game_id):
await bot.say("character linked")
else:
await bot.say("character added")
@bot.command(pass_context=True)
async def o(context, *names):
await new_object(context,names)
@bot.command(pass_context=True)
async def object(context,*names):
await new_object(context,names)
async def new_object(context,names):
"""
    make new characters in bulk (GM only)
    :param context: context
    :param names: tuple of character names
"""
roles = list(role.id for role in context.message.author.roles) # get role ids
gm = DiscordUtility.is_role(GMRole, roles)
if gm:
for name in names:
game_object.new_character(name)
await bot.say("character added")
else:
await not_gm_message()
"""
####################################################################
Game Utility
####################################################################
"""
@bot.command(pass_context=True)
async def info(context, arg=None):
"""
Gets information of a character
:param context: context
:param arg: can be an id, or name
"""
if arg is None:
name = context.message.author.id
else:
name = DiscordUtility.valid_id(arg)
if name is None: # not a valid mention, test for name
name = arg.lower()
cha = game_object.get_character(name)
if cha is None:
await bot.say("no character")
else:
await bot.say(str(cha))
"""
####################################################################
Game Play
####################################################################
"""
@bot.command(pass_context=True)
async def aspect(context, action, player_id, *text):
"""
Handles aspect addition and removal
:param context: context of message
:param action: one of [add, a] or [remove, r]
:param player_id: optional player id, for gm use only
:param text: the list of aspects to add
"""
header = await gm_player_command_header(context, player_id, text)
if header is None:
return
[player, gm, cha, args] = header
    a = action.lower()  # case-insensitive
if a in ["add", "a"]:
for t in args:
cha.add_aspect(t)
await bot.say("aspect(s) added")
elif a in ["remove","r"]:
for t in args:
cha.remove_aspect(t)
await bot.say("aspect(s) removed")
@bot.command(pass_context=True)
async def skill(context,action, player_id,*text):
"""
handle addition and removal of skills
:param context: context
:param action: either [add,a] or [remove,r]
:param player_id: optional id for gm use
:param text: the list of arguments
"""
header = await gm_player_command_header(context,player_id,text)
if header is None:
return
[player, gm, cha, args] = header
    a = action.lower()  # case-insensitive
if a in ["add", "a"]:
pairs = zip(args[0::2],args[1::2])
# each pair goes (name, level)
for p in pairs:
# TODO check that the order is in pairs
cha.add_skill(int(p[1]),p[0])
await bot.say("skill(s) added")
elif a in ["remove", "r"]:
for t in args:
cha.remove_skill(t) # player only refers to self
await bot.say("skill(s) removed")
@bot.command(pass_context=True)
async def bar(context, action, id, *text):
"""
    Handle bar spending, refreshing, addition and removal.
    :param context: context
    :param action: one of [spend,s] [refresh,re] [add,a] [remove,r]
:param id: optional name for gm use
:param text: list of arguments
:return:
"""
header = await gm_player_command_header(context, id, text)
if header is None:
return
[player, gm, cha, args] = header
a = action.lower()
if a in ["s","spend"]:
for t in args:
cha.spend_bar(t)
await bot.say("bar(s) spent")
elif a in ["re","refresh"]:
if gm: # only gm can refresh a bar
for t in args:
cha.refresh_bar(t)
await bot.say("bar(s) refreshed")
else:
await not_gm_message()
elif a in ["add","a"]:
for t in args:
cha.add_bar(BarFactory.bar_default(t))
await bot.say("bar(s) added")
elif a in ["remove","r"]:
for t in args:
cha.remove_bar(t)
await bot.say("bar(s) removed")
@bot.command(pass_context=True)
async def | (context, action, id, *text):
"""
    Handle box addition, removal, spending and refreshing.
    Removals are applied in order: calling remove[0] twice removes
    boxes 0 and 1, because box 1 shifts into position 0 after the first removal.
    :param context: context
    :param action: one of [spend,s] [refresh,re] [add,a] [remove,r]
    :param id: optional id for gm use
    :param text: tuple of arguments
"""
header = await gm_player_command_header(context, id, text)
if header is None:
return
[player, gm, cha, args] = header
a = action.lower()
if a in ["s","spend"]:
pairs = zip(args[0::2], args[1::2]) # generate pairs
# must be in pairs of 2, going box, bar
for p in pairs:
cha.spend_box(p[0],int(p[1]))
await bot.say("box(s) spent")
elif a in ["re","refresh"]:
if gm: # only gm can refresh a box
pairs = zip(args[::2], args[1::2]) # generate pairs
for p in pairs:
cha.refresh_box(p[0], int(p[1]))
await bot.say("box(s) refreshed")
else:
await not_gm_message()
elif a in ["add","a"]:
pairs = zip(args[0::2], args[1::2]) # generate pairs
for p in pairs:
bar = cha.get_bar(p[0])
if bar is None:
continue
bar.add_box(Box(int(p[1])))
await bot.say("box(s) added")
elif a in ["remove", "r"]:
pairs = zip(args[0::2], args[1::2]) # generate pairs
for p in pairs:
bar = cha.get_bar(p[0])
if bar is None:
continue
bar.remove_box(Box(int(p[1])))
await bot.say("box(s) removed")
@bot.command(pass_context=True)
async def consequence(context, action, modifier, id=None, *text):
    await consequence_code(context, action, modifier, id, *text)
@bot.command(pass_context=True) # shortcut for consequence
async def cons(context, action, modifier, id=None, *text):
    await consequence_code(context, action, modifier, id, *text)
async def consequence_code(context, action, modifier, id=None, *text):
"""
    Handle consequence actions.
    :param context: context
    :param action: one of [add,a] [remove,r] [info,i] [text,t] [aspectadd,aa] [aspectremove,ar]
    :param modifier: the consequence being affected
    :param id: optional id for gm use
    :param text: list of arguments
"""
header = await gm_player_command_header(context, id, text)
if header is None:
return
[player, gm, cha, args] = header
    m = await positive_num(modifier)
    if m is None:
        return
a = action.lower()
if a in ["add","a"]:
cha.add_consequence(m)
await bot.say("consequence added")
elif a in ["remove","r"]:
if gm:
cha.remove_consequence(m)
await bot.say("consequence removed")
else:
await not_gm_message()
elif a in ["info","i"]:
cons = cha.get_consequence(m)
await bot.say(str(cons))
elif a in ["text","t"]:
cons = cha.get_consequence(m)
cons.set_text(args[0]) # take only the first argument
await bot.say("consequence text changed")
elif a in ["aa","aspectadd"]:
cons = cha.get_consequence(m)
for t in args:
cons.add_aspect(t)
await bot.say("consequence aspect(s) added")
elif a in ["ar","aspectremove"]:
if gm:
cons = cha.get_consequence(m)
for t in args:
cons.remove_aspect(t)
await bot.say("consequence aspect(s) removed")
else:
await not_gm_message()
@bot.command(pass_context=True)
async def fate(context, action, id=None, *args):
"""
    Handle fate point spending, giving, info and refresh-setting.
    :param context: context
    :param action: one of [spend,s] [give,g] [info,i] [refreshSet,rset]
    :param id: optional id for gm use
:param id : optional id
:param args: list of arguments
"""
header = await gm_player_command_header(context, id, args)
if header is None:
return
[player, gm, cha, args] = header
a = action.lower()
if a in ["s","spend"]:
amount = await positive_num(args[0])
if amount is not None and cha.change_fate(-amount):
await bot.say("%s fate spent" % amount)
else:
await bot.say("not enough fate points")
elif a in ["g", "give"]:
if gm:
amount = await positive_num(args[0])
if amount is not None and cha.change_fate(amount):
await bot.say("%s fate given" % amount)
else:
await bot.say("exceed maximum fate points")
else:
await not_gm_message()
elif a in ["i","info"]:
await bot.send_message(context.message.author, "%s has %d fate points" % (cha.get_name(), cha.get_fate()))
elif a in ["rset","refreshSet"]:
if gm: # GM only action
            amount = await positive_num(args[0])
            if amount is not None:
                cha.set_refresh_fate(amount)
                await bot.say("character refresh set to %d" % amount)
else:
await not_gm_message()
async def positive_num(num):
"""
    Check that a string parses to a positive (non-zero) integer.
:param num: string to be check
:return: None if invalid, integer if valid
"""
try:
amount = int(num)
except ValueError:
await bot.say("not valid amount")
return None
if amount <= 0:
await bot.say("cannot be 0 or negative")
return None
return amount
@bot.event
async def on_message(message):
await bot.process_commands(message)
async def gm_player_command_header(context,id,text):
"""
Sorts out player and gm commands
:param context: context of message
:param id: optional id entered (may not be id)
:param text: the list of arguments as a tuple
:return: player : boolean, gm : boolean, character : Character, args : list
"""
roles = list(role.id for role in context.message.author.roles) # get role ids
player = DiscordUtility.is_role(playerRole, roles)
gm = DiscordUtility.is_role(GMRole, roles)
if not player and not gm:
return None
if gm:
game_id = DiscordUtility.valid_id(id)
if game_id is None: # not a valid mention, test for name
game_id = id.lower()
else:
game_id = context.message.author.id
if game_id is None:
return None # end command
cha = game_object.get_character(game_id)
if cha is None:
await bot.say("no character targeted")
return None # end command
args = list(text)
if player and not gm:
args.insert(0,id) # add id
return [player,gm,cha,args]
async def not_gm_message():
"""
sends a not gm message, used to reduce connascence
"""
await bot.say("not gm")
async def not_owner_message():
"""
send a not owner message, used to reduce connascence
:return:
"""
await bot.say("not owner")
"""
Read the bot token from token.txt, which is git-ignored so the token
cannot be leaked. The file should contain only the token.
"""
with open("token.txt", "r") as file:
    token = file.read().strip()  # strip the trailing newline so login does not fail
bot.run(token)
| box |
queue.rs | // Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
use std::num::NonZeroU32;
use deno_core::error::AnyError;
use deno_core::OpState;
use deno_core::ResourceId;
use deno_core::ZeroCopyBuf;
use serde::Deserialize;
use super::error::WebGpuResult;
type WebGpuQueue = super::WebGpuDevice;
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct QueueSubmitArgs {
queue_rid: ResourceId,
command_buffers: Vec<ResourceId>,
}
pub fn op_webgpu_queue_submit(
state: &mut OpState,
args: QueueSubmitArgs,
_: (),
) -> Result<WebGpuResult, AnyError> {
let instance = state.borrow::<super::Instance>();
let queue_resource =
state.resource_table.get::<WebGpuQueue>(args.queue_rid)?; | let mut ids = vec![];
for rid in args.command_buffers {
let buffer_resource =
state
.resource_table
.get::<super::command_encoder::WebGpuCommandBuffer>(rid)?;
ids.push(buffer_resource.0);
}
let maybe_err =
gfx_select!(queue => instance.queue_submit(queue, &ids)).err();
Ok(WebGpuResult::maybe_err(maybe_err))
}
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct GpuImageDataLayout {
offset: u64,
bytes_per_row: Option<u32>,
rows_per_image: Option<u32>,
}
impl From<GpuImageDataLayout> for wgpu_types::ImageDataLayout {
fn from(layout: GpuImageDataLayout) -> Self {
wgpu_types::ImageDataLayout {
offset: layout.offset,
bytes_per_row: NonZeroU32::new(layout.bytes_per_row.unwrap_or(0)),
rows_per_image: NonZeroU32::new(layout.rows_per_image.unwrap_or(0)),
}
}
}
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct QueueWriteBufferArgs {
queue_rid: ResourceId,
buffer: ResourceId,
buffer_offset: u64,
data_offset: usize,
size: Option<usize>,
}
pub fn op_webgpu_write_buffer(
state: &mut OpState,
args: QueueWriteBufferArgs,
zero_copy: ZeroCopyBuf,
) -> Result<WebGpuResult, AnyError> {
let instance = state.borrow::<super::Instance>();
let buffer_resource = state
.resource_table
.get::<super::buffer::WebGpuBuffer>(args.buffer)?;
let buffer = buffer_resource.0;
let queue_resource =
state.resource_table.get::<WebGpuQueue>(args.queue_rid)?;
let queue = queue_resource.0;
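  // Slice the incoming buffer according to the requested offset and optional size.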
let data = match args.size {
Some(size) => &zero_copy[args.data_offset..(args.data_offset + size)],
None => &zero_copy[args.data_offset..],
};
let maybe_err = gfx_select!(queue => instance.queue_write_buffer(
queue,
buffer,
args.buffer_offset,
data
))
.err();
Ok(WebGpuResult::maybe_err(maybe_err))
}
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct QueueWriteTextureArgs {
queue_rid: ResourceId,
destination: super::command_encoder::GpuImageCopyTexture,
data_layout: GpuImageDataLayout,
size: super::texture::GpuExtent3D,
}
pub fn op_webgpu_write_texture(
state: &mut OpState,
args: QueueWriteTextureArgs,
zero_copy: ZeroCopyBuf,
) -> Result<WebGpuResult, AnyError> {
let instance = state.borrow::<super::Instance>();
let texture_resource = state
.resource_table
.get::<super::texture::WebGpuTexture>(args.destination.texture)?;
let queue_resource =
state.resource_table.get::<WebGpuQueue>(args.queue_rid)?;
let queue = queue_resource.0;
let destination = wgpu_core::command::ImageCopyTexture {
texture: texture_resource.0,
mip_level: args.destination.mip_level,
origin: args.destination.origin.into(),
aspect: args.destination.aspect.into(),
};
let data_layout = args.data_layout.into();
gfx_ok!(queue => instance.queue_write_texture(
queue,
&destination,
&*zero_copy,
&data_layout,
&args.size.into()
))
} | let queue = queue_resource.0;
|
decimal.rs | use forward_ref::{forward_ref_binop, forward_ref_op_assign};
use schemars::JsonSchema;
use serde::{de, ser, Deserialize, Deserializer, Serialize};
use std::cmp::Ordering;
use std::convert::TryInto;
use std::fmt::{self, Write};
use std::ops::{Add, AddAssign, Div, DivAssign, Mul, Sub, SubAssign};
use std::str::FromStr;
use thiserror::Error;
use crate::errors::StdError;
use crate::OverflowError;
use super::Fraction;
use super::Isqrt;
use super::{Uint128, Uint256};
/// A fixed-point decimal value with 18 fractional digits, i.e. Decimal(1_000_000_000_000_000_000) == 1.0
///
/// The greatest possible value that can be represented is 340282366920938463463.374607431768211455 (which is (2^128 - 1) / 10^18)
#[derive(Copy, Clone, Default, Debug, PartialEq, Eq, PartialOrd, Ord, JsonSchema)]
pub struct Decimal(#[schemars(with = "String")] Uint128);
#[derive(Error, Debug, PartialEq)]
#[error("Decimal range exceeded")]
pub struct DecimalRangeExceeded;
impl Decimal {
const DECIMAL_FRACTIONAL: Uint128 = Uint128::new(1_000_000_000_000_000_000u128); // 1*10**18
const DECIMAL_FRACTIONAL_SQUARED: Uint128 =
Uint128::new(1_000_000_000_000_000_000_000_000_000_000_000_000u128); // (1*10**18)**2 = 1*10**36
const DECIMAL_PLACES: usize = 18; // This needs to be an even number.
pub const MAX: Self = Self(Uint128::MAX);
/// Create a 1.0 Decimal
pub const fn one() -> Self {
Decimal(Self::DECIMAL_FRACTIONAL)
}
/// Create a 0.0 Decimal
pub const fn zero() -> Self {
Decimal(Uint128::zero())
}
/// Convert x% into Decimal
pub fn percent(x: u64) -> Self {
Decimal(((x as u128) * 10_000_000_000_000_000).into())
}
/// Convert permille (x/1000) into Decimal
pub fn permille(x: u64) -> Self {
Decimal(((x as u128) * 1_000_000_000_000_000).into())
}
/// Creates a decimal from a number of atomic units and the number
/// of decimal places. The inputs will be converted internally to form
/// a decimal with 18 decimal places. So the input 123 and 2 will create
/// the decimal 1.23.
///
/// Using 18 decimal places is slightly more efficient than other values
/// as no internal conversion is necessary.
///
/// ## Examples
///
/// ```
/// # use cosmwasm_std::{Decimal, Uint128};
/// let a = Decimal::from_atomics(Uint128::new(1234), 3).unwrap();
/// assert_eq!(a.to_string(), "1.234");
///
/// let a = Decimal::from_atomics(1234u128, 0).unwrap();
/// assert_eq!(a.to_string(), "1234");
///
/// let a = Decimal::from_atomics(1u64, 18).unwrap();
/// assert_eq!(a.to_string(), "0.000000000000000001");
/// ```
pub fn from_atomics(
atomics: impl Into<Uint128>,
decimal_places: u32,
) -> Result<Self, DecimalRangeExceeded> {
let atomics = atomics.into();
const TEN: Uint128 = Uint128::new(10);
Ok(match decimal_places.cmp(&(Self::DECIMAL_PLACES as u32)) {
Ordering::Less => {
let digits = (Self::DECIMAL_PLACES as u32) - decimal_places; // No overflow because decimal_places < DECIMAL_PLACES
let factor = TEN.checked_pow(digits).unwrap(); // Safe because digits <= 17
Self(
atomics
.checked_mul(factor)
.map_err(|_| DecimalRangeExceeded)?,
)
}
Ordering::Equal => Self(atomics),
Ordering::Greater => {
let digits = decimal_places - (Self::DECIMAL_PLACES as u32); // No overflow because decimal_places > DECIMAL_PLACES
if let Ok(factor) = TEN.checked_pow(digits) {
Self(atomics.checked_div(factor).unwrap()) // Safe because factor cannot be zero
} else {
// In this case `factor` exceeds the Uint128 range.
// Any Uint128 `x` divided by `factor` with `factor > Uint128::MAX` is 0.
// Try e.g. Python3: `(2**128-1) // 2**128`
Self(Uint128::zero())
}
}
})
}
/// Returns the ratio (numerator / denominator) as a Decimal
pub fn from_ratio(numerator: impl Into<Uint128>, denominator: impl Into<Uint128>) -> Self {
let numerator: Uint128 = numerator.into();
let denominator: Uint128 = denominator.into();
if denominator.is_zero() {
panic!("Denominator must not be zero");
}
Decimal(
// numerator * DECIMAL_FRACTIONAL / denominator
numerator.multiply_ratio(Self::DECIMAL_FRACTIONAL, denominator),
)
}
pub fn is_zero(&self) -> bool {
self.0.is_zero()
}
/// A decimal is an integer of atomic units plus a number that specifies the
/// position of the decimal dot. So any decimal can be expressed as two numbers.
///
/// ## Examples
///
/// ```
/// # use cosmwasm_std::{Decimal, Uint128};
/// # use std::str::FromStr;
/// // Value with whole and fractional part
/// let a = Decimal::from_str("1.234").unwrap();
/// assert_eq!(a.decimal_places(), 18);
/// assert_eq!(a.atomics(), Uint128::new(1234000000000000000));
///
/// // Smallest possible value
/// let b = Decimal::from_str("0.000000000000000001").unwrap();
/// assert_eq!(b.decimal_places(), 18);
/// assert_eq!(b.atomics(), Uint128::new(1));
/// ```
pub fn atomics(&self) -> Uint128 {
self.0
}
/// The number of decimal places. This is a constant value for now
/// but this could potentially change as the type evolves.
///
/// See also [`Decimal::atomics()`].
pub fn decimal_places(&self) -> u32 {
Self::DECIMAL_PLACES as u32
}
/// Multiplies one `Decimal` by another, returning an `OverflowError` if an overflow occurred.
pub fn checked_mul(self, other: Self) -> Result<Self, OverflowError> {
let result_as_uint256 = self.numerator().full_mul(other.numerator())
/ Uint256::from_uint128(Self::DECIMAL_FRACTIONAL); // from_uint128 is a const method and should be "free"
result_as_uint256
.try_into()
.map(Self)
.map_err(|_| OverflowError {
operation: crate::OverflowOperation::Mul,
operand1: self.to_string(),
operand2: other.to_string(),
})
}
/// Raises a value to the power of `exp`, returning an `OverflowError` if an overflow occurred.
pub fn checked_pow(self, exp: u32) -> Result<Self, OverflowError> {
// This uses the exponentiation by squaring algorithm:
// https://en.wikipedia.org/wiki/Exponentiation_by_squaring#Basic_method
fn inner(mut x: Decimal, mut n: u32) -> Result<Decimal, OverflowError> {
if n == 0 {
return Ok(Decimal::one());
}
let mut y = Decimal::one();
while n > 1 {
if n % 2 == 0 {
x = x.checked_mul(x)?;
n /= 2;
} else {
y = x.checked_mul(y)?;
x = x.checked_mul(x)?;
n = (n - 1) / 2;
}
}
Ok(x * y)
}
inner(self, exp).map_err(|_| OverflowError {
operation: crate::OverflowOperation::Pow,
operand1: self.to_string(),
operand2: exp.to_string(),
})
}
/// Returns the approximate square root as a Decimal.
///
/// This should not overflow or panic.
pub fn sqrt(&self) -> Self {
// Algorithm described in https://hackmd.io/@webmaster128/SJThlukj_
// We start with the highest precision possible and lower it until
// there's no overflow.
//
// TODO: This could be made more efficient once log10 is in:
// https://github.com/rust-lang/rust/issues/70887
// The max precision is something like `9 - log10(self.0) / 2`.
(0..=Self::DECIMAL_PLACES / 2)
.rev()
.find_map(|i| self.sqrt_with_precision(i))
// The last step (i = 0) is guaranteed to succeed because `isqrt(u128::MAX) * 10^9` does not overflow
.unwrap()
}
/// Lower precision means more aggressive rounding, but less risk of overflow.
/// Precision *must* be a number between 0 and 9 (inclusive).
///
/// Returns `None` if the internal multiplication overflows.
fn sqrt_with_precision(&self, precision: usize) -> Option<Self> {
let precision = precision as u32;
let inner_mul = 100u128.pow(precision);
self.0.checked_mul(inner_mul.into()).ok().map(|inner| {
let outer_mul = 10u128.pow(Self::DECIMAL_PLACES as u32 / 2 - precision);
Decimal(inner.isqrt().checked_mul(Uint128::from(outer_mul)).unwrap())
})
}
}
impl Fraction<Uint128> for Decimal {
#[inline]
fn numerator(&self) -> Uint128 {
self.0
}
#[inline]
fn denominator(&self) -> Uint128 {
Self::DECIMAL_FRACTIONAL
}
/// Returns the multiplicative inverse `1/d` for decimal `d`.
///
/// If `d` is zero, none is returned.
fn inv(&self) -> Option<Self> {
if self.is_zero() {
None
} else {
// Let self be p/q with p = self.0 and q = DECIMAL_FRACTIONAL.
// Now we calculate the inverse a/b = q/p such that b = DECIMAL_FRACTIONAL. Then
// `a = DECIMAL_FRACTIONAL*DECIMAL_FRACTIONAL / self.0`.
Some(Decimal(Self::DECIMAL_FRACTIONAL_SQUARED / self.0))
}
}
}
impl FromStr for Decimal {
type Err = StdError;
/// Converts the decimal string to a Decimal
/// Possible inputs: "1.23", "1", "000012", "1.123000000"
/// Disallowed: "", ".23"
///
/// This never performs any kind of rounding.
/// More than DECIMAL_PLACES fractional digits, even zeros, result in an error.
fn from_str(input: &str) -> Result<Self, Self::Err> {
let mut parts_iter = input.split('.');
let whole_part = parts_iter.next().unwrap(); // split always returns at least one element
let whole = whole_part
.parse::<Uint128>()
.map_err(|_| StdError::generic_err("Error parsing whole"))?;
let mut atomics = whole
.checked_mul(Self::DECIMAL_FRACTIONAL)
.map_err(|_| StdError::generic_err("Value too big"))?;
if let Some(fractional_part) = parts_iter.next() {
let fractional = fractional_part
.parse::<Uint128>()
.map_err(|_| StdError::generic_err("Error parsing fractional"))?;
let exp =
(Self::DECIMAL_PLACES.checked_sub(fractional_part.len())).ok_or_else(|| {
StdError::generic_err(format!(
"Cannot parse more than {} fractional digits",
Self::DECIMAL_PLACES
))
})?;
debug_assert!(exp <= Self::DECIMAL_PLACES);
let fractional_factor = Uint128::from(10u128.pow(exp as u32));
atomics = atomics
.checked_add(
// The inner multiplication can't overflow because
// fractional < 10^DECIMAL_PLACES && fractional_factor <= 10^DECIMAL_PLACES
fractional.checked_mul(fractional_factor).unwrap(),
)
.map_err(|_| StdError::generic_err("Value too big"))?;
}
if parts_iter.next().is_some() {
return Err(StdError::generic_err("Unexpected number of dots"));
}
Ok(Decimal(atomics))
}
}
impl fmt::Display for Decimal {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let whole = (self.0) / Self::DECIMAL_FRACTIONAL;
let fractional = (self.0).checked_rem(Self::DECIMAL_FRACTIONAL).unwrap();
if fractional.is_zero() {
write!(f, "{}", whole)
} else {
let fractional_string =
format!("{:0>padding$}", fractional, padding = Self::DECIMAL_PLACES);
f.write_str(&whole.to_string())?;
f.write_char('.')?;
f.write_str(fractional_string.trim_end_matches('0'))?;
Ok(())
}
}
}
impl Add for Decimal {
type Output = Self;
fn add(self, other: Self) -> Self {
Decimal(self.0 + other.0)
}
}
forward_ref_binop!(impl Add, add for Decimal, Decimal);
impl AddAssign for Decimal {
fn add_assign(&mut self, rhs: Decimal) {
*self = *self + rhs;
}
}
forward_ref_op_assign!(impl AddAssign, add_assign for Decimal, Decimal);
impl Sub for Decimal {
type Output = Self;
fn sub(self, other: Self) -> Self {
Decimal(self.0 - other.0)
}
}
forward_ref_binop!(impl Sub, sub for Decimal, Decimal);
impl SubAssign for Decimal {
fn sub_assign(&mut self, rhs: Decimal) {
*self = *self - rhs;
}
}
forward_ref_op_assign!(impl SubAssign, sub_assign for Decimal, Decimal);
impl Mul for Decimal {
type Output = Self;
#[allow(clippy::suspicious_arithmetic_impl)]
fn mul(self, other: Self) -> Self {
// Decimals are fractions. We can multiply two decimals a and b
// via
// (a.numerator() * b.numerator()) / (a.denominator() * b.denominator())
// = (a.numerator() * b.numerator()) / a.denominator() / b.denominator()
let result_as_uint256 = self.numerator().full_mul(other.numerator())
/ Uint256::from_uint128(Self::DECIMAL_FRACTIONAL); // from_uint128 is a const method and should be "free"
match result_as_uint256.try_into() {
Ok(result) => Self(result),
Err(_) => panic!("attempt to multiply with overflow"),
}
}
}
/// Both d*u and u*d with d: Decimal and u: Uint128 returns an Uint128. There is no
/// specific reason for this decision other than the initial use cases we have. If you
/// need a Decimal result for the same calculation, use Decimal(d*u) or Decimal(u*d).
impl Mul<Decimal> for Uint128 {
type Output = Self;
#[allow(clippy::suspicious_arithmetic_impl)]
fn mul(self, rhs: Decimal) -> Self::Output {
// 0*a and b*0 is always 0
if self.is_zero() || rhs.is_zero() {
return Uint128::zero();
}
self.multiply_ratio(rhs.0, Decimal::DECIMAL_FRACTIONAL)
}
}
impl Mul<Uint128> for Decimal {
type Output = Uint128;
fn mul(self, rhs: Uint128) -> Self::Output {
rhs * self
}
}
impl Div<Uint128> for Decimal {
type Output = Self;
fn div(self, rhs: Uint128) -> Self::Output {
Decimal(self.0 / rhs)
}
}
impl DivAssign<Uint128> for Decimal {
fn div_assign(&mut self, rhs: Uint128) {
self.0 /= rhs;
}
}
impl<A> std::iter::Sum<A> for Decimal
where
Self: Add<A, Output = Self>,
{
fn sum<I: Iterator<Item = A>>(iter: I) -> Self {
iter.fold(Self::zero(), Add::add)
}
}
/// Serializes as a decimal string
impl Serialize for Decimal {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: ser::Serializer,
{
serializer.serialize_str(&self.to_string())
}
}
/// Deserializes as a decimal string
impl<'de> Deserialize<'de> for Decimal {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
deserializer.deserialize_str(DecimalVisitor)
}
}
struct DecimalVisitor;
impl<'de> de::Visitor<'de> for DecimalVisitor {
type Value = Decimal;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("string-encoded decimal")
}
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
where
E: de::Error,
{
match Decimal::from_str(v) {
Ok(d) => Ok(d),
Err(e) => Err(E::custom(format!("Error parsing decimal '{}': {}", v, e))),
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::{from_slice, to_vec};
#[test]
fn decimal_one() {
let value = Decimal::one();
assert_eq!(value.0, Decimal::DECIMAL_FRACTIONAL);
}
#[test]
fn decimal_zero() {
let value = Decimal::zero();
assert!(value.0.is_zero());
}
#[test]
fn decimal_percent() {
let value = Decimal::percent(50);
assert_eq!(value.0, Decimal::DECIMAL_FRACTIONAL / Uint128::from(2u8));
}
#[test]
fn decimal_permille() {
let value = Decimal::permille(125);
assert_eq!(value.0, Decimal::DECIMAL_FRACTIONAL / Uint128::from(8u8));
}
#[test]
fn decimal_from_atomics_works() {
let one = Decimal::one();
let two = one + one;
assert_eq!(Decimal::from_atomics(1u128, 0).unwrap(), one);
assert_eq!(Decimal::from_atomics(10u128, 1).unwrap(), one);
assert_eq!(Decimal::from_atomics(100u128, 2).unwrap(), one);
assert_eq!(Decimal::from_atomics(1000u128, 3).unwrap(), one);
assert_eq!(
Decimal::from_atomics(1000000000000000000u128, 18).unwrap(),
one
);
assert_eq!(
Decimal::from_atomics(10000000000000000000u128, 19).unwrap(),
one
);
assert_eq!(
Decimal::from_atomics(100000000000000000000u128, 20).unwrap(),
one
);
assert_eq!(Decimal::from_atomics(2u128, 0).unwrap(), two);
assert_eq!(Decimal::from_atomics(20u128, 1).unwrap(), two);
assert_eq!(Decimal::from_atomics(200u128, 2).unwrap(), two);
assert_eq!(Decimal::from_atomics(2000u128, 3).unwrap(), two);
assert_eq!(
Decimal::from_atomics(2000000000000000000u128, 18).unwrap(),
two
);
assert_eq!(
Decimal::from_atomics(20000000000000000000u128, 19).unwrap(),
two
);
assert_eq!(
Decimal::from_atomics(200000000000000000000u128, 20).unwrap(),
two
);
// Cuts decimal digits (20 provided but only 18 can be stored)
assert_eq!(
Decimal::from_atomics(4321u128, 20).unwrap(),
Decimal::from_str("0.000000000000000043").unwrap()
);
assert_eq!(
Decimal::from_atomics(6789u128, 20).unwrap(),
Decimal::from_str("0.000000000000000067").unwrap()
);
assert_eq!(
Decimal::from_atomics(u128::MAX, 38).unwrap(),
Decimal::from_str("3.402823669209384634").unwrap()
);
assert_eq!(
Decimal::from_atomics(u128::MAX, 39).unwrap(),
Decimal::from_str("0.340282366920938463").unwrap()
);
assert_eq!(
Decimal::from_atomics(u128::MAX, 45).unwrap(),
Decimal::from_str("0.000000340282366920").unwrap()
);
assert_eq!(
Decimal::from_atomics(u128::MAX, 51).unwrap(),
Decimal::from_str("0.000000000000340282").unwrap()
);
assert_eq!(
Decimal::from_atomics(u128::MAX, 56).unwrap(),
Decimal::from_str("0.000000000000000003").unwrap()
);
assert_eq!(
Decimal::from_atomics(u128::MAX, 57).unwrap(),
Decimal::from_str("0.000000000000000000").unwrap()
);
assert_eq!(
Decimal::from_atomics(u128::MAX, u32::MAX).unwrap(),
Decimal::from_str("0.000000000000000000").unwrap()
);
// Can be used with max value
let max = Decimal::MAX;
assert_eq!(
Decimal::from_atomics(max.atomics(), max.decimal_places()).unwrap(),
max
);
// Overflow is only possible with digits < 18
let result = Decimal::from_atomics(u128::MAX, 17);
assert_eq!(result.unwrap_err(), DecimalRangeExceeded);
}
#[test]
fn decimal_from_ratio_works() {
// 1.0
assert_eq!(Decimal::from_ratio(1u128, 1u128), Decimal::one());
assert_eq!(Decimal::from_ratio(53u128, 53u128), Decimal::one());
assert_eq!(Decimal::from_ratio(125u128, 125u128), Decimal::one());
// 1.5
assert_eq!(Decimal::from_ratio(3u128, 2u128), Decimal::percent(150));
assert_eq!(Decimal::from_ratio(150u128, 100u128), Decimal::percent(150));
assert_eq!(Decimal::from_ratio(333u128, 222u128), Decimal::percent(150));
// 0.125
assert_eq!(Decimal::from_ratio(1u64, 8u64), Decimal::permille(125));
assert_eq!(Decimal::from_ratio(125u64, 1000u64), Decimal::permille(125));
// 1/3 (result floored)
assert_eq!(
Decimal::from_ratio(1u64, 3u64),
Decimal(Uint128::from(333_333_333_333_333_333u128))
);
// 2/3 (result floored)
assert_eq!(
Decimal::from_ratio(2u64, 3u64),
Decimal(Uint128::from(666_666_666_666_666_666u128))
);
// large inputs
assert_eq!(Decimal::from_ratio(0u128, u128::MAX), Decimal::zero());
assert_eq!(Decimal::from_ratio(u128::MAX, u128::MAX), Decimal::one());
// 340282366920938463463 is the largest integer <= Decimal::MAX
assert_eq!(
Decimal::from_ratio(340282366920938463463u128, 1u128),
Decimal::from_str("340282366920938463463").unwrap()
);
}
#[test]
#[should_panic(expected = "Denominator must not be zero")]
fn decimal_from_ratio_panics_for_zero_denominator() {
Decimal::from_ratio(1u128, 0u128);
}
#[test]
fn decimal_implements_fraction() {
let fraction = Decimal::from_str("1234.567").unwrap();
assert_eq!(
fraction.numerator(),
Uint128::from(1_234_567_000_000_000_000_000u128)
);
assert_eq!(
fraction.denominator(),
Uint128::from(1_000_000_000_000_000_000u128)
);
}
#[test]
fn decimal_from_str_works() {
// Integers
assert_eq!(Decimal::from_str("0").unwrap(), Decimal::percent(0));
assert_eq!(Decimal::from_str("1").unwrap(), Decimal::percent(100));
assert_eq!(Decimal::from_str("5").unwrap(), Decimal::percent(500));
assert_eq!(Decimal::from_str("42").unwrap(), Decimal::percent(4200));
assert_eq!(Decimal::from_str("000").unwrap(), Decimal::percent(0));
assert_eq!(Decimal::from_str("001").unwrap(), Decimal::percent(100));
assert_eq!(Decimal::from_str("005").unwrap(), Decimal::percent(500));
assert_eq!(Decimal::from_str("0042").unwrap(), Decimal::percent(4200));
// Decimals
assert_eq!(Decimal::from_str("1.0").unwrap(), Decimal::percent(100));
assert_eq!(Decimal::from_str("1.5").unwrap(), Decimal::percent(150));
assert_eq!(Decimal::from_str("0.5").unwrap(), Decimal::percent(50));
assert_eq!(Decimal::from_str("0.123").unwrap(), Decimal::permille(123));
assert_eq!(Decimal::from_str("40.00").unwrap(), Decimal::percent(4000));
assert_eq!(Decimal::from_str("04.00").unwrap(), Decimal::percent(400));
assert_eq!(Decimal::from_str("00.40").unwrap(), Decimal::percent(40));
assert_eq!(Decimal::from_str("00.04").unwrap(), Decimal::percent(4));
// Can handle DECIMAL_PLACES fractional digits
assert_eq!(
Decimal::from_str("7.123456789012345678").unwrap(),
Decimal(Uint128::from(7123456789012345678u128))
);
assert_eq!(
Decimal::from_str("7.999999999999999999").unwrap(),
Decimal(Uint128::from(7999999999999999999u128))
);
// Works for documented max value
assert_eq!(
Decimal::from_str("340282366920938463463.374607431768211455").unwrap(),
Decimal::MAX
);
}
#[test]
fn decimal_from_str_errors_for_broken_whole_part() {
match Decimal::from_str("").unwrap_err() {
StdError::GenericErr { msg, .. } => assert_eq!(msg, "Error parsing whole"),
e => panic!("Unexpected error: {:?}", e),
}
match Decimal::from_str(" ").unwrap_err() {
StdError::GenericErr { msg, .. } => assert_eq!(msg, "Error parsing whole"),
e => panic!("Unexpected error: {:?}", e),
}
match Decimal::from_str("-1").unwrap_err() {
StdError::GenericErr { msg, .. } => assert_eq!(msg, "Error parsing whole"),
e => panic!("Unexpected error: {:?}", e),
}
}
#[test]
fn decimal_from_str_errors_for_broken_fractional_part() {
match Decimal::from_str("1.").unwrap_err() {
StdError::GenericErr { msg, .. } => assert_eq!(msg, "Error parsing fractional"),
e => panic!("Unexpected error: {:?}", e),
}
match Decimal::from_str("1. ").unwrap_err() {
StdError::GenericErr { msg, .. } => assert_eq!(msg, "Error parsing fractional"),
e => panic!("Unexpected error: {:?}", e),
}
match Decimal::from_str("1.e").unwrap_err() {
StdError::GenericErr { msg, .. } => assert_eq!(msg, "Error parsing fractional"),
e => panic!("Unexpected error: {:?}", e),
}
match Decimal::from_str("1.2e3").unwrap_err() {
StdError::GenericErr { msg, .. } => assert_eq!(msg, "Error parsing fractional"),
e => panic!("Unexpected error: {:?}", e),
}
}
#[test]
fn decimal_from_str_errors_for_more_than_18_fractional_digits() {
match Decimal::from_str("7.1234567890123456789").unwrap_err() {
StdError::GenericErr { msg, .. } => {
assert_eq!(msg, "Cannot parse more than 18 fractional digits",)
}
e => panic!("Unexpected error: {:?}", e),
}
// No special rules for trailing zeros. This could be changed but adds gas cost for the happy path.
match Decimal::from_str("7.1230000000000000000").unwrap_err() {
StdError::GenericErr { msg, .. } => {
assert_eq!(msg, "Cannot parse more than 18 fractional digits")
}
e => panic!("Unexpected error: {:?}", e),
}
}
#[test]
fn decimal_from_str_errors_for_invalid_number_of_dots() {
match Decimal::from_str("1.2.3").unwrap_err() {
StdError::GenericErr { msg, .. } => assert_eq!(msg, "Unexpected number of dots"),
e => panic!("Unexpected error: {:?}", e),
}
match Decimal::from_str("1.2.3.4").unwrap_err() {
StdError::GenericErr { msg, .. } => assert_eq!(msg, "Unexpected number of dots"),
e => panic!("Unexpected error: {:?}", e),
}
}
#[test]
fn decimal_from_str_errors_for_more_than_max_value() {
// Integer
match Decimal::from_str("340282366920938463464").unwrap_err() {
StdError::GenericErr { msg, .. } => assert_eq!(msg, "Value too big"),
e => panic!("Unexpected error: {:?}", e),
}
// Decimal
match Decimal::from_str("340282366920938463464.0").unwrap_err() {
StdError::GenericErr { msg, .. } => assert_eq!(msg, "Value too big"),
e => panic!("Unexpected error: {:?}", e),
}
match Decimal::from_str("340282366920938463463.374607431768211456").unwrap_err() {
StdError::GenericErr { msg, .. } => assert_eq!(msg, "Value too big"),
e => panic!("Unexpected error: {:?}", e),
}
}
#[test]
fn decimal_atomics_works() {
let zero = Decimal::zero();
let one = Decimal::one();
let half = Decimal::percent(50);
let two = Decimal::percent(200);
let max = Decimal::MAX;
assert_eq!(zero.atomics(), Uint128::new(0));
assert_eq!(one.atomics(), Uint128::new(1000000000000000000));
assert_eq!(half.atomics(), Uint128::new(500000000000000000));
assert_eq!(two.atomics(), Uint128::new(2000000000000000000));
assert_eq!(max.atomics(), Uint128::MAX);
}
#[test]
fn decimal_decimal_places_works() {
let zero = Decimal::zero();
let one = Decimal::one();
let half = Decimal::percent(50);
let two = Decimal::percent(200);
let max = Decimal::MAX;
assert_eq!(zero.decimal_places(), 18);
assert_eq!(one.decimal_places(), 18);
assert_eq!(half.decimal_places(), 18);
assert_eq!(two.decimal_places(), 18);
assert_eq!(max.decimal_places(), 18);
}
#[test]
fn decimal_is_zero_works() {
assert!(Decimal::zero().is_zero());
assert!(Decimal::percent(0).is_zero());
assert!(Decimal::permille(0).is_zero());
assert!(!Decimal::one().is_zero());
assert!(!Decimal::percent(123).is_zero());
assert!(!Decimal::permille(1234).is_zero());
}
#[test]
fn decimal_inv_works() {
// d = 0
assert_eq!(Decimal::zero().inv(), None);
// d == 1
assert_eq!(Decimal::one().inv(), Some(Decimal::one()));
// d > 1 exact
assert_eq!(
Decimal::from_str("2").unwrap().inv(),
Some(Decimal::from_str("0.5").unwrap())
);
assert_eq!(
Decimal::from_str("20").unwrap().inv(),
Some(Decimal::from_str("0.05").unwrap())
);
assert_eq!(
Decimal::from_str("200").unwrap().inv(),
Some(Decimal::from_str("0.005").unwrap())
);
assert_eq!(
Decimal::from_str("2000").unwrap().inv(),
Some(Decimal::from_str("0.0005").unwrap())
);
// d > 1 rounded
assert_eq!(
Decimal::from_str("3").unwrap().inv(),
Some(Decimal::from_str("0.333333333333333333").unwrap())
);
assert_eq!(
Decimal::from_str("6").unwrap().inv(),
Some(Decimal::from_str("0.166666666666666666").unwrap())
);
// d < 1 exact
assert_eq!(
Decimal::from_str("0.5").unwrap().inv(),
Some(Decimal::from_str("2").unwrap())
);
assert_eq!(
Decimal::from_str("0.05").unwrap().inv(),
Some(Decimal::from_str("20").unwrap())
);
assert_eq!(
Decimal::from_str("0.005").unwrap().inv(),
Some(Decimal::from_str("200").unwrap())
);
assert_eq!(
Decimal::from_str("0.0005").unwrap().inv(),
Some(Decimal::from_str("2000").unwrap())
);
}
#[test]
#[allow(clippy::op_ref)]
fn decimal_add_works() {
let value = Decimal::one() + Decimal::percent(50); // 1.5
assert_eq!(
value.0,
Decimal::DECIMAL_FRACTIONAL * Uint128::from(3u8) / Uint128::from(2u8)
);
assert_eq!(
Decimal::percent(5) + Decimal::percent(4),
Decimal::percent(9)
);
assert_eq!(Decimal::percent(5) + Decimal::zero(), Decimal::percent(5));
assert_eq!(Decimal::zero() + Decimal::zero(), Decimal::zero());
// works for refs
let a = Decimal::percent(15);
let b = Decimal::percent(25);
let expected = Decimal::percent(40);
assert_eq!(a + b, expected);
assert_eq!(&a + b, expected);
assert_eq!(a + &b, expected);
assert_eq!(&a + &b, expected);
}
#[test]
#[should_panic(expected = "attempt to add with overflow")]
fn decimal_add_overflow_panics() {
let _value = Decimal::MAX + Decimal::percent(50);
}
#[test]
fn decimal_add_assign_works() {
let mut a = Decimal::percent(30);
a += Decimal::percent(20);
assert_eq!(a, Decimal::percent(50));
// works for refs
let mut a = Decimal::percent(15);
let b = Decimal::percent(3);
let expected = Decimal::percent(18);
a += &b;
assert_eq!(a, expected);
}
#[test]
#[allow(clippy::op_ref)]
fn decimal_sub_works() {
let value = Decimal::one() - Decimal::percent(50); // 0.5
assert_eq!(value.0, Decimal::DECIMAL_FRACTIONAL / Uint128::from(2u8));
assert_eq!(
Decimal::percent(9) - Decimal::percent(4),
Decimal::percent(5)
);
assert_eq!(Decimal::percent(16) - Decimal::zero(), Decimal::percent(16));
assert_eq!(Decimal::percent(16) - Decimal::percent(16), Decimal::zero());
assert_eq!(Decimal::zero() - Decimal::zero(), Decimal::zero());
// works for refs
let a = Decimal::percent(13);
let b = Decimal::percent(6);
let expected = Decimal::percent(7);
assert_eq!(a - b, expected);
assert_eq!(&a - b, expected);
assert_eq!(a - &b, expected);
assert_eq!(&a - &b, expected);
}
#[test]
#[should_panic(expected = "attempt to subtract with overflow")]
fn decimal_sub_overflow_panics() {
let _value = Decimal::zero() - Decimal::percent(50);
}
#[test]
fn decimal_sub_assign_works() {
let mut a = Decimal::percent(20);
a -= Decimal::percent(2);
assert_eq!(a, Decimal::percent(18));
// works for refs
let mut a = Decimal::percent(33);
let b = Decimal::percent(13);
let expected = Decimal::percent(20);
a -= &b;
assert_eq!(a, expected);
}
#[test]
fn decimal_implements_mul() {
let one = Decimal::one();
let two = one + one;
let half = Decimal::percent(50);
// 1*x and x*1
assert_eq!(one * Decimal::percent(0), Decimal::percent(0));
assert_eq!(one * Decimal::percent(1), Decimal::percent(1));
assert_eq!(one * Decimal::percent(10), Decimal::percent(10));
assert_eq!(one * Decimal::percent(100), Decimal::percent(100));
assert_eq!(one * Decimal::percent(1000), Decimal::percent(1000));
assert_eq!(one * Decimal::MAX, Decimal::MAX);
assert_eq!(Decimal::percent(0) * one, Decimal::percent(0));
assert_eq!(Decimal::percent(1) * one, Decimal::percent(1));
assert_eq!(Decimal::percent(10) * one, Decimal::percent(10));
assert_eq!(Decimal::percent(100) * one, Decimal::percent(100));
assert_eq!(Decimal::percent(1000) * one, Decimal::percent(1000));
assert_eq!(Decimal::MAX * one, Decimal::MAX);
// double
assert_eq!(two * Decimal::percent(0), Decimal::percent(0));
assert_eq!(two * Decimal::percent(1), Decimal::percent(2));
assert_eq!(two * Decimal::percent(10), Decimal::percent(20));
assert_eq!(two * Decimal::percent(100), Decimal::percent(200));
assert_eq!(two * Decimal::percent(1000), Decimal::percent(2000));
assert_eq!(Decimal::percent(0) * two, Decimal::percent(0));
assert_eq!(Decimal::percent(1) * two, Decimal::percent(2));
assert_eq!(Decimal::percent(10) * two, Decimal::percent(20));
assert_eq!(Decimal::percent(100) * two, Decimal::percent(200));
assert_eq!(Decimal::percent(1000) * two, Decimal::percent(2000));
// half
assert_eq!(half * Decimal::percent(0), Decimal::percent(0));
assert_eq!(half * Decimal::percent(1), Decimal::permille(5));
assert_eq!(half * Decimal::percent(10), Decimal::percent(5));
assert_eq!(half * Decimal::percent(100), Decimal::percent(50));
assert_eq!(half * Decimal::percent(1000), Decimal::percent(500));
assert_eq!(Decimal::percent(0) * half, Decimal::percent(0));
assert_eq!(Decimal::percent(1) * half, Decimal::permille(5));
assert_eq!(Decimal::percent(10) * half, Decimal::percent(5));
assert_eq!(Decimal::percent(100) * half, Decimal::percent(50));
assert_eq!(Decimal::percent(1000) * half, Decimal::percent(500));
fn dec(input: &str) -> Decimal {
Decimal::from_str(input).unwrap()
}
// Move left
let a = dec("123.127726548762582");
assert_eq!(a * dec("1"), dec("123.127726548762582"));
assert_eq!(a * dec("10"), dec("1231.27726548762582"));
assert_eq!(a * dec("100"), dec("12312.7726548762582"));
assert_eq!(a * dec("1000"), dec("123127.726548762582"));
assert_eq!(a * dec("1000000"), dec("123127726.548762582"));
assert_eq!(a * dec("1000000000"), dec("123127726548.762582"));
assert_eq!(a * dec("1000000000000"), dec("123127726548762.582"));
assert_eq!(a * dec("1000000000000000"), dec("123127726548762582"));
assert_eq!(a * dec("1000000000000000000"), dec("123127726548762582000"));
assert_eq!(dec("1") * a, dec("123.127726548762582"));
assert_eq!(dec("10") * a, dec("1231.27726548762582"));
assert_eq!(dec("100") * a, dec("12312.7726548762582"));
assert_eq!(dec("1000") * a, dec("123127.726548762582"));
assert_eq!(dec("1000000") * a, dec("123127726.548762582"));
assert_eq!(dec("1000000000") * a, dec("123127726548.762582"));
assert_eq!(dec("1000000000000") * a, dec("123127726548762.582"));
assert_eq!(dec("1000000000000000") * a, dec("123127726548762582"));
assert_eq!(dec("1000000000000000000") * a, dec("123127726548762582000"));
// Move right
let max = Decimal::MAX;
assert_eq!(
max * dec("1.0"),
dec("340282366920938463463.374607431768211455")
);
assert_eq!(
max * dec("0.1"),
dec("34028236692093846346.337460743176821145")
);
assert_eq!(
max * dec("0.01"),
dec("3402823669209384634.633746074317682114")
);
assert_eq!(
max * dec("0.001"),
dec("340282366920938463.463374607431768211")
);
assert_eq!(
max * dec("0.000001"),
dec("340282366920938.463463374607431768")
);
assert_eq!(
max * dec("0.000000001"),
dec("340282366920.938463463374607431")
);
assert_eq!(
max * dec("0.000000000001"),
dec("340282366.920938463463374607")
);
assert_eq!(
max * dec("0.000000000000001"),
dec("340282.366920938463463374")
);
assert_eq!(
max * dec("0.000000000000000001"),
dec("340.282366920938463463")
);
}
#[test]
#[should_panic(expected = "attempt to multiply with overflow")]
fn decimal_mul_overflow_panics() {
let _value = Decimal::MAX * Decimal::percent(101);
}
#[test]
fn decimal_checked_mul() {
let test_data = [
(Decimal::zero(), Decimal::zero()),
(Decimal::zero(), Decimal::one()),
(Decimal::one(), Decimal::zero()),
(Decimal::percent(10), Decimal::zero()),
(Decimal::percent(10), Decimal::percent(5)),
(Decimal::MAX, Decimal::one()),
(Decimal::MAX / 2u128.into(), Decimal::percent(200)),
(Decimal::permille(6), Decimal::permille(13)),
];
// The regular std::ops::Mul is our source of truth for these tests.
for (x, y) in test_data.iter().cloned() {
assert_eq!(x * y, x.checked_mul(y).unwrap());
}
}
#[test]
fn decimal_checked_mul_overflow() {
assert_eq!(
Decimal::MAX.checked_mul(Decimal::percent(200)),
Err(OverflowError {
operation: crate::OverflowOperation::Mul,
operand1: Decimal::MAX.to_string(),
operand2: Decimal::percent(200).to_string(),
})
);
}
#[test]
// in this test the Decimal is on the right
fn uint128_decimal_multiply() {
// a*b
let left = Uint128::new(300);
let right = Decimal::one() + Decimal::percent(50); // 1.5
assert_eq!(left * right, Uint128::new(450));
// a*0
let left = Uint128::new(300);
let right = Decimal::zero();
assert_eq!(left * right, Uint128::new(0));
// 0*a
let left = Uint128::new(0);
let right = Decimal::one() + Decimal::percent(50); // 1.5
assert_eq!(left * right, Uint128::new(0));
}
#[test]
// in this test the Decimal is on the left
fn decimal_uint128_multiply() {
// a*b
let left = Decimal::one() + Decimal::percent(50); // 1.5
let right = Uint128::new(300);
assert_eq!(left * right, Uint128::new(450));
// 0*a
let left = Decimal::zero();
let right = Uint128::new(300);
assert_eq!(left * right, Uint128::new(0));
// a*0
let left = Decimal::one() + Decimal::percent(50); // 1.5 | #[test]
fn decimal_uint128_division() {
// a/b
let left = Decimal::percent(150); // 1.5
let right = Uint128::new(3);
assert_eq!(left / right, Decimal::percent(50));
// 0/a
let left = Decimal::zero();
let right = Uint128::new(300);
assert_eq!(left / right, Decimal::zero());
}
#[test]
#[should_panic(expected = "attempt to divide by zero")]
fn decimal_uint128_divide_by_zero() {
let left = Decimal::percent(150); // 1.5
let right = Uint128::new(0);
let _result = left / right;
}
#[test]
fn decimal_uint128_div_assign() {
// a/b
let mut dec = Decimal::percent(150); // 1.5
dec /= Uint128::new(3);
assert_eq!(dec, Decimal::percent(50));
// 0/a
let mut dec = Decimal::zero();
dec /= Uint128::new(300);
assert_eq!(dec, Decimal::zero());
}
#[test]
#[should_panic(expected = "attempt to divide by zero")]
fn decimal_uint128_div_assign_by_zero() {
// a/0
let mut dec = Decimal::percent(50);
dec /= Uint128::new(0);
}
#[test]
fn decimal_uint128_sqrt() {
assert_eq!(Decimal::percent(900).sqrt(), Decimal::percent(300));
assert!(Decimal::percent(316) < Decimal::percent(1000).sqrt());
assert!(Decimal::percent(1000).sqrt() < Decimal::percent(317));
}
/// sqrt(2) is an irrational number, i.e. all 18 decimal places should be used.
#[test]
fn decimal_uint128_sqrt_is_precise() {
assert_eq!(
Decimal::from_str("2").unwrap().sqrt(),
Decimal::from_str("1.414213562373095048").unwrap() // https://www.wolframalpha.com/input/?i=sqrt%282%29
);
}
#[test]
fn decimal_uint128_sqrt_does_not_overflow() {
assert_eq!(
Decimal::from_str("400").unwrap().sqrt(),
Decimal::from_str("20").unwrap()
);
}
#[test]
fn decimal_uint128_sqrt_intermediate_precision_used() {
assert_eq!(
Decimal::from_str("400001").unwrap().sqrt(),
// The last two digits (27) are truncated below due to the algorithm
// we use. Larger numbers will cause less precision.
// https://www.wolframalpha.com/input/?i=sqrt%28400001%29
Decimal::from_str("632.456322602596803200").unwrap()
);
}
#[test]
fn decimal_checked_pow() {
for exp in 0..10 {
assert_eq!(Decimal::one().checked_pow(exp).unwrap(), Decimal::one());
}
// This case is mathematically undefined but we ensure consistency with Rust standard types
// https://play.rust-lang.org/?version=stable&mode=debug&edition=2021&gist=20df6716048e77087acd40194b233494
assert_eq!(Decimal::zero().checked_pow(0).unwrap(), Decimal::one());
for exp in 1..10 {
assert_eq!(Decimal::zero().checked_pow(exp).unwrap(), Decimal::zero());
}
for num in &[
Decimal::percent(50),
Decimal::percent(99),
Decimal::percent(200),
] {
assert_eq!(num.checked_pow(0).unwrap(), Decimal::one())
}
assert_eq!(
Decimal::percent(20).checked_pow(2).unwrap(),
Decimal::percent(4)
);
assert_eq!(
Decimal::percent(20).checked_pow(3).unwrap(),
Decimal::permille(8)
);
assert_eq!(
Decimal::percent(200).checked_pow(4).unwrap(),
Decimal::percent(1600)
);
assert_eq!(
Decimal::percent(700).checked_pow(5).unwrap(),
Decimal::percent(1680700)
);
assert_eq!(
Decimal::percent(700).checked_pow(8).unwrap(),
Decimal::percent(576480100)
);
assert_eq!(
Decimal::percent(700).checked_pow(10).unwrap(),
Decimal::percent(28247524900)
);
assert_eq!(
Decimal::percent(120).checked_pow(123).unwrap(),
Decimal(5486473221892422150877397607u128.into())
);
assert_eq!(
Decimal::percent(10).checked_pow(2).unwrap(),
Decimal(10000000000000000u128.into())
);
assert_eq!(
Decimal::percent(10).checked_pow(18).unwrap(),
Decimal(1u128.into())
);
}
#[test]
fn decimal_checked_pow_overflow() {
assert_eq!(
Decimal::MAX.checked_pow(2),
Err(OverflowError {
operation: crate::OverflowOperation::Pow,
operand1: Decimal::MAX.to_string(),
operand2: "2".to_string(),
})
);
}
#[test]
fn decimal_to_string() {
// Integers
assert_eq!(Decimal::zero().to_string(), "0");
assert_eq!(Decimal::one().to_string(), "1");
assert_eq!(Decimal::percent(500).to_string(), "5");
// Decimals
assert_eq!(Decimal::percent(125).to_string(), "1.25");
assert_eq!(Decimal::percent(42638).to_string(), "426.38");
assert_eq!(Decimal::percent(3).to_string(), "0.03");
assert_eq!(Decimal::permille(987).to_string(), "0.987");
assert_eq!(
Decimal(Uint128::from(1u128)).to_string(),
"0.000000000000000001"
);
assert_eq!(
Decimal(Uint128::from(10u128)).to_string(),
"0.00000000000000001"
);
assert_eq!(
Decimal(Uint128::from(100u128)).to_string(),
"0.0000000000000001"
);
assert_eq!(
Decimal(Uint128::from(1000u128)).to_string(),
"0.000000000000001"
);
assert_eq!(
Decimal(Uint128::from(10000u128)).to_string(),
"0.00000000000001"
);
assert_eq!(
Decimal(Uint128::from(100000u128)).to_string(),
"0.0000000000001"
);
assert_eq!(
Decimal(Uint128::from(1000000u128)).to_string(),
"0.000000000001"
);
assert_eq!(
Decimal(Uint128::from(10000000u128)).to_string(),
"0.00000000001"
);
assert_eq!(
Decimal(Uint128::from(100000000u128)).to_string(),
"0.0000000001"
);
assert_eq!(
Decimal(Uint128::from(1000000000u128)).to_string(),
"0.000000001"
);
assert_eq!(
Decimal(Uint128::from(10000000000u128)).to_string(),
"0.00000001"
);
assert_eq!(
Decimal(Uint128::from(100000000000u128)).to_string(),
"0.0000001"
);
assert_eq!(
Decimal(Uint128::from(1000000000000u128)).to_string(),
"0.000001"
);
assert_eq!(
Decimal(Uint128::from(10000000000000u128)).to_string(),
"0.00001"
);
assert_eq!(
Decimal(Uint128::from(100000000000000u128)).to_string(),
"0.0001"
);
assert_eq!(
Decimal(Uint128::from(1000000000000000u128)).to_string(),
"0.001"
);
assert_eq!(
Decimal(Uint128::from(10000000000000000u128)).to_string(),
"0.01"
);
assert_eq!(
Decimal(Uint128::from(100000000000000000u128)).to_string(),
"0.1"
);
}
#[test]
fn decimal_iter_sum() {
let items = vec![
Decimal::zero(),
Decimal(Uint128::from(2u128)),
Decimal(Uint128::from(2u128)),
];
assert_eq!(items.iter().sum::<Decimal>(), Decimal(Uint128::from(4u128)));
assert_eq!(
items.into_iter().sum::<Decimal>(),
Decimal(Uint128::from(4u128))
);
let empty: Vec<Decimal> = vec![];
assert_eq!(Decimal::zero(), empty.iter().sum());
}
#[test]
fn decimal_serialize() {
assert_eq!(to_vec(&Decimal::zero()).unwrap(), br#""0""#);
assert_eq!(to_vec(&Decimal::one()).unwrap(), br#""1""#);
assert_eq!(to_vec(&Decimal::percent(8)).unwrap(), br#""0.08""#);
assert_eq!(to_vec(&Decimal::percent(87)).unwrap(), br#""0.87""#);
assert_eq!(to_vec(&Decimal::percent(876)).unwrap(), br#""8.76""#);
assert_eq!(to_vec(&Decimal::percent(8765)).unwrap(), br#""87.65""#);
}
#[test]
fn decimal_deserialize() {
assert_eq!(from_slice::<Decimal>(br#""0""#).unwrap(), Decimal::zero());
assert_eq!(from_slice::<Decimal>(br#""1""#).unwrap(), Decimal::one());
assert_eq!(from_slice::<Decimal>(br#""000""#).unwrap(), Decimal::zero());
assert_eq!(from_slice::<Decimal>(br#""001""#).unwrap(), Decimal::one());
assert_eq!(
from_slice::<Decimal>(br#""0.08""#).unwrap(),
Decimal::percent(8)
);
assert_eq!(
from_slice::<Decimal>(br#""0.87""#).unwrap(),
Decimal::percent(87)
);
assert_eq!(
from_slice::<Decimal>(br#""8.76""#).unwrap(),
Decimal::percent(876)
);
assert_eq!(
from_slice::<Decimal>(br#""87.65""#).unwrap(),
Decimal::percent(8765)
);
}
} | let right = Uint128::new(0);
assert_eq!(left * right, Uint128::new(0));
}
|
advanced.rs | extern crate env_logger;
use pgnparse::parser::*;
fn main() | {
env_logger::init();
let mut book = Book::new().me("chesshyperbot");
book.parse("test.pgn");
let pos = book.positions.get("rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w KQkq -");
println!("pos for epd = {:?}", pos);
if let Some(pos) = pos {
let m = pos.get_random_weighted_by_plays();
println!("random weighted by plays = {:?} , plays = {}", m, m.unwrap().plays());
let m = pos.get_random_weighted_by_perf();
println!("random weighted by perf = {:?} , perf = {}", m, m.unwrap().perf());
let m = pos.get_random_mixed(50);
println!("random mixed = {:?}", m);
}
} |
|
index.js | import React, {PropTypes} from 'react';
import { connect } from 'react-redux';
import CaseStudy from 'components/case-study';
import BackBar from 'components/ui/backbar';
import PageLayout from 'layouts/PageLayout/PageLayout';
import has from 'lodash/has';
import snakeCase from 'lodash/snakeCase';
const mapStateToProps = (state) => ({
work: state.work,
viewport: state.viewport
});
export class | extends PageLayout {
static propTypes = {
dispatch: PropTypes.func,
work: PropTypes.object,
toggleNav: PropTypes.func,
viewport: PropTypes.object
};
static contextTypes = {
router: PropTypes.object
};
constructor (props) {
super(props);
this.state = {
singleMode: false,
activeStudy: null
};
}
goBack () {
TweenLite.to(document.body, 0.5, {autoAlpha: 0, ease: Circ.easeInOut, className: '+=isHidden', onComplete: () => {
this.context.router.push({pathname: '/work'});
}});
}
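// Find the study whose snake_cased title matches the ':project' route param.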
getCurrentCaseStudy () {
let current = {};
this.props.work.studyData.forEach(function (study) {
if (snakeCase(study.get('title')) === this.props.params.project) {
current.study = study;
}
}, this);
return current;
}
createCurrentStudy (current) {
return (
<CaseStudy showBody {...this.props} singleMode data={current.study}>
<BackBar showBar singleMode data={current.study.toJS()} goBack={this.goBack.bind(this)} />
</CaseStudy>
);
}
getCurrentStudy () {
const current = this.getCurrentCaseStudy();
if (has(current, 'study')) {
return this.createCurrentStudy(current);
} else {
this.context.router.push({pathname: '/404'});
}
}
componentDidMount () {
this.actions.changeNavState({isVisible: false, shouldAnimate: false});
TweenLite.set('#footer', {autoAlpha: 0});
TweenLite.set(document.documentElement, {overflowY: 'hidden'});
TweenLite.fromTo(this.refs.caseStudy, 1, {autoAlpha: 0}, {autoAlpha: 1, ease: Expo.easeInOut, delay: 1});
}
componentWillUnmount () {
this.actions.changeNavState({isVisible: true});
TweenLite.set('#footer', {autoAlpha: 1});
TweenLite.set(document.documentElement, {clearProps: 'overflow'});
}
hideInactiveProjects (activeStudy, singleMode) {
this.setState({singleMode: singleMode, activeStudy: activeStudy});
}
render () {
return (
<div ref='caseStudy' style={{opacity: 0, position: 'absolute', top: 0, left: 0, width: '100%', height: '100%'}}>
{this.getCurrentStudy()}
</div>
);
}
}
export default connect(mapStateToProps)(Direct);
| Direct |
conf.py | # -*- coding: utf-8 -*-
#
# OpenFAST documentation build configuration file, created by
# sphinx-quickstart on Wed Jan 25 13:52:07 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
import os
import sys
import subprocess
import re
#sys.path.append(os.path.abspath('_extensions/'))
readTheDocs = os.environ.get('READTHEDOCS', None) == 'True'
builddir = sys.argv[-1]
sourcedir = sys.argv[-2]
# Use this to turn Doxygen on or off
useDoxygen = True
# This function was adapted from https://gitlab.kitware.com/cmb/smtk
# Only run when on readthedocs
def runDoxygen(sourcefile, doxyfileIn, doxyfileOut):
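# Read the Doxyfile template, substitute the CMake-style @CMAKE_SOURCE_DIR@ and
# @CMAKE_BINARY_DIR@ placeholders with the local checkout path, write out the
# concrete Doxyfile, then invoke doxygen on it.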
dx = open(os.path.join(sourcedir, doxyfileIn), 'r')
cfg = dx.read()
srcdir = os.path.abspath(os.path.join(os.getcwd(), '..'))
bindir = srcdir
c2 = re.sub('@CMAKE_SOURCE_DIR@', srcdir, re.sub('@CMAKE_BINARY_DIR@', bindir, cfg))
doxname = os.path.join(sourcedir, doxyfileOut)
dox = open(doxname, 'w')
print(c2, file=dox)
dox.close()
print("Running Doxygen on {}".format(doxyfileOut))
doxproc = subprocess.call(('doxygen', doxname))
if readTheDocs and useDoxygen:
runDoxygen(sourcedir, 'Doxyfile.in', 'Doxyfile')
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.5.2'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.autosummary',
'sphinx.ext.mathjax',
'sphinx.ext.intersphinx',
'sphinxcontrib.doxylink',
'sphinxcontrib.bibtex',
]
autodoc_default_flags = [
'members',
'show-inheritance',
'undoc-members'
]
autoclass_content = 'both'
mathjax_path = 'https://cdn.mathjax.org/mathjax/latest/MathJax.js?config=TeX-AMS-MML_HTMLorMML'
# FIXME: Naively assuming build directory one level up locally, and two up on readthedocs
if useDoxygen:
if readTheDocs:
doxylink = {
'openfast': (
os.path.join(builddir, '..', '..', 'openfast.tag'),
os.path.join('html')
)
}
else:
doxylink = {
'openfast': (
os.path.join(builddir, '..', 'openfast.tag'),
os.path.join('html')
)
}
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = ['.rst']
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'OpenFAST'
copyright = u'2017, National Renewable Energy Laboratory'
author = u'OpenFAST Team'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'2.1'
# The full version, including alpha/beta/rc tags.
release = u'v2.1.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs. |
# If true, figures, tables and code-blocks are automatically numbered if they
# have a caption. At the same time, the numref role is enabled. For now, it works
# only with the HTML builder and the LaTeX builder. Default is False.
numfig = True
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# FIXME: Naively assuming build directory one level up locally, and two up on readthedocs
if useDoxygen:
if readTheDocs:
html_extra_path = [os.path.join(builddir, '..', '..', 'doxygen')]
else:
html_extra_path = [os.path.join(builddir, '..', 'doxygen')]
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
html_logo = '_static/openfastlogo.jpg'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
html_theme_options = {
"analytics_id": "UA-68999653-10"
}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'Openfastdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(
master_doc,
'Openfast.tex',
u'OpenFAST Documentation',
u'National Renewable Energy Laboratory',
'manual'
),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(
master_doc,
'openfast',
u'OpenFAST Documentation',
[author],
1
)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(
master_doc,
'OpenFAST',
u'OpenFAST Documentation',
author,
'OpenFAST',
'One line description of project.',
'Miscellaneous'
),
]
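# Register the custom 'confval' and 'cmakeval' object types so that input-file
# parameters and CMake options can be cross-referenced and indexed.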
def setup(app):
app.add_object_type(
"confval",
"confval",
objname="input file parameter",
indextemplate="pair: %s; input file parameter"
)
app.add_object_type(
"cmakeval",
"cmakeval",
objname="CMake configuration value",
indextemplate="pair: %s; CMake configuration"
) | # Usually you set "language" from the command line for these cases.
language = None |
compact_test.go | package compact
import (
"testing"
"github.com/thanos-io/thanos/pkg/block/metadata"
"github.com/pkg/errors"
terrors "github.com/prometheus/prometheus/tsdb/errors"
"github.com/thanos-io/thanos/pkg/testutil"
)
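// IsHaltError must detect a halt error even when it is wrapped; these cases
// exercise several levels of errors.Wrap nesting.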
func TestHaltError(t *testing.T) {
err := errors.New("test")
testutil.Assert(t, !IsHaltError(err), "halt error")
err = halt(errors.New("test"))
testutil.Assert(t, IsHaltError(err), "not a halt error")
err = errors.Wrap(halt(errors.New("test")), "something")
testutil.Assert(t, IsHaltError(err), "not a halt error")
err = errors.Wrap(errors.Wrap(halt(errors.New("test")), "something"), "something2")
testutil.Assert(t, IsHaltError(err), "not a halt error")
}
func TestHaltMultiError(t *testing.T) {
haltErr := halt(errors.New("halt error"))
nonHaltErr := errors.New("not a halt error")
errs := terrors.MultiError{nonHaltErr}
testutil.Assert(t, !IsHaltError(errs), "should not be a halt error")
errs.Add(haltErr)
testutil.Assert(t, IsHaltError(errs), "if any halt errors are present this should return true")
}
func TestRetryMultiError(t *testing.T) |
func TestRetryError(t *testing.T) {
err := errors.New("test")
testutil.Assert(t, !IsRetryError(err), "retry error")
err = retry(errors.New("test"))
testutil.Assert(t, IsRetryError(err), "not a retry error")
err = errors.Wrap(retry(errors.New("test")), "something")
testutil.Assert(t, IsRetryError(err), "not a retry error")
err = errors.Wrap(errors.Wrap(retry(errors.New("test")), "something"), "something2")
testutil.Assert(t, IsRetryError(err), "not a retry error")
err = errors.Wrap(retry(errors.Wrap(halt(errors.New("test")), "something")), "something2")
testutil.Assert(t, IsHaltError(err), "not a halt error. Retry should not hide halt error")
}
func TestGroupKey(t *testing.T) {
for _, tcase := range []struct {
input metadata.Thanos
expected string
}{
{
input: metadata.Thanos{},
expected: "0@17241709254077376921",
},
{
input: metadata.Thanos{
Labels: map[string]string{},
Downsample: metadata.ThanosDownsample{Resolution: 0},
},
expected: "0@17241709254077376921",
},
{
input: metadata.Thanos{
Labels: map[string]string{"foo": "bar", "foo1": "bar2"},
Downsample: metadata.ThanosDownsample{Resolution: 0},
},
expected: "0@2124638872457683483",
},
{
input: metadata.Thanos{
Labels: map[string]string{`foo/some..thing/some.thing/../`: `a_b_c/bar-something-a\metric/a\x`},
Downsample: metadata.ThanosDownsample{Resolution: 0},
},
expected: "0@16590761456214576373",
},
} {
if ok := t.Run("", func(t *testing.T) {
testutil.Equals(t, tcase.expected, GroupKey(tcase.input))
}); !ok {
return
}
}
}
| {
retryErr := retry(errors.New("retry error"))
nonRetryErr := errors.New("not a retry error")
errs := terrors.MultiError{nonRetryErr}
testutil.Assert(t, !IsRetryError(errs), "should not be a retry error")
errs = terrors.MultiError{retryErr}
testutil.Assert(t, IsRetryError(errs), "if all errors are retriable this should return true")
errs = terrors.MultiError{nonRetryErr, retryErr}
testutil.Assert(t, !IsRetryError(errs), "mixed errors should return false")
} |
stack_meta_test.go | package stack_mgr_test
import (
"context"
"testing"
"github.com/aws/aws-sdk-go-v2/service/ssm"
ssmtypes "github.com/aws/aws-sdk-go-v2/service/ssm/types"
"github.com/chanzuckerberg/happy/mocks"
backend "github.com/chanzuckerberg/happy/pkg/backend/aws"
"github.com/chanzuckerberg/happy/pkg/backend/aws/interfaces"
"github.com/chanzuckerberg/happy/pkg/backend/aws/testbackend"
"github.com/chanzuckerberg/happy/pkg/config"
"github.com/chanzuckerberg/happy/pkg/stack_mgr"
"github.com/golang/mock/gomock"
"github.com/stretchr/testify/require"
)
const testFilePath = "../config/testdata/test_config.yaml"
const testDockerComposePath = "../config/testdata/docker-compose.yml"
func TestUpdate(t *testing.T) | {
ctx := context.Background()
r := require.New(t)
ctrl := gomock.NewController(t)
bootstrapConfig := &config.Bootstrap{
HappyConfigPath: testFilePath,
DockerComposeConfigPath: testDockerComposePath,
Env: "rdev",
}
config, err := config.NewHappyConfig(bootstrapConfig)
r.NoError(err)
dataMap := map[string]string{
"app": config.App(),
"env": config.GetEnv(),
"instance": "test-stack",
}
tagMap := map[string]string{
"app": "happy/app",
"env": "happy/env",
"instance": "happy/instance",
"owner": "happy/meta/owner",
"priority": "happy/meta/priority",
"slice": "happy/meta/slice",
"imagetag": "happy/meta/imagetag",
"imagetags": "happy/meta/imagetags",
"configsecret": "happy/meta/configsecret",
"created": "happy/meta/created-at",
"updated": "happy/meta/updated-at",
}
paramMap := map[string]string{
"instance": "stack_name",
"slice": "slice",
"priority": "priority",
"imagetag": "image_tag",
"imagetags": "image_tags",
"configsecret": "happy_config_secret",
}
stackMeta := &stack_mgr.StackMeta{
StackName: "test-stack",
DataMap: dataMap,
TagMap: tagMap,
ParamMap: paramMap,
}
// mock the backend
ssmMock := interfaces.NewMockSSMAPI(ctrl)
retVal := "[\"stack_1\",\"stack_2\"]"
ret := &ssm.GetParameterOutput{
Parameter: &ssmtypes.Parameter{Value: &retVal},
}
ssmMock.EXPECT().GetParameter(gomock.Any(), gomock.Any()).Return(ret, nil)
// mock the workspace GetTags method, used in setPriority()
mockWorkspace1 := mocks.NewMockWorkspace(ctrl)
mockWorkspace1.EXPECT().GetTags().Return(map[string]string{"tag-1": "testing-1"}, nil)
mockWorkspace2 := mocks.NewMockWorkspace(ctrl)
mockWorkspace2.EXPECT().GetTags().Return(map[string]string{"tag-2": "testing-2"}, nil)
// mock the executor
mockWorkspaceRepo := mocks.NewMockWorkspaceRepoIface(ctrl)
first := mockWorkspaceRepo.EXPECT().GetWorkspace(gomock.Any(), gomock.Any()).Return(mockWorkspace1, nil)
second := mockWorkspaceRepo.EXPECT().GetWorkspace(gomock.Any(), gomock.Any()).Return(mockWorkspace2, nil)
gomock.InOrder(first, second)
backend, err := testbackend.NewBackend(ctx, ctrl, config, backend.WithSSMClient(ssmMock))
r.NoError(err)
stackMgr := stack_mgr.NewStackService().WithBackend(backend).WithWorkspaceRepo(mockWorkspaceRepo)
err = stackMeta.Update(ctx, "test-tag", make(map[string]string), "", stackMgr)
r.NoError(err)
r.Equal("{}", stackMeta.GetTags()["happy/meta/imagetags"])
err = stackMeta.Update(ctx, "test-tag", map[string]string{"foo": "bar"}, "", stackMgr)
r.NoError(err)
r.Equal("{\"foo\":\"bar\"}", stackMeta.GetTags()["happy/meta/imagetags"])
} |
|
search-timeline.component.ts | import { Component, Input } from "@angular/core";
import { decades } from "../../services/catalog.service";
@Component({
selector: "search-timeline",
templateUrl: "./search-timeline.component.html",
styleUrls: ["./search-timeline.component.css"],
})
export class | {
timelineRange: number[] = decades();
@Input() countByYears: any;
@Input() countMissingDate: number = 0;
constructor() {}
}
| SearchTimelineComponent |
init_monitoring_xpack_info.js | /*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { checkLicenseGenerator } from './cluster_alerts/check_license';
import { LOGGING_TAG } from '../common/constants';
/*
* Expose xpackInfo for the Monitoring cluster as server.plugins.monitoring.info
*/
export const initMonitoringXpackInfo = async server => {
const config = server.config();
const xpackInfoOptions = {
clusterSource: 'monitoring',
pollFrequencyInMillis: config.get('xpack.monitoring.xpack_api_polling_frequency_millis')
};
const xpackInfo = server.plugins.xpack_main.createXPackInfo(xpackInfoOptions);
xpackInfo.feature('monitoring').registerLicenseCheckResultsGenerator(checkLicenseGenerator);
| if (!xpackInfoTest.isAvailable()) {
server.log([LOGGING_TAG, 'warning'], `X-Pack Monitoring Cluster Alerts will not be available: ${xpackInfoTest.unavailableReason()}`);
}
}; | server.expose('info', xpackInfo);
// check if X-Pack is installed on Monitoring Cluster
const xpackInfoTest = await xpackInfo.refreshNow();
|
exec.go | package ops
import (
"bytes"
"context"
"encoding/json"
"fmt"
"io/ioutil"
"net"
"os"
"path"
"path/filepath"
"sort"
"strings"
"sync"
"time"
"github.com/containerd/containerd/mount"
"github.com/containerd/containerd/platforms"
"github.com/docker/docker/pkg/idtools"
"github.com/docker/docker/pkg/locker"
"github.com/moby/buildkit/cache"
"github.com/moby/buildkit/cache/metadata"
"github.com/moby/buildkit/client"
"github.com/moby/buildkit/executor"
"github.com/moby/buildkit/identity"
"github.com/moby/buildkit/session"
"github.com/moby/buildkit/session/secrets"
"github.com/moby/buildkit/session/sshforward"
"github.com/moby/buildkit/snapshot"
"github.com/moby/buildkit/solver"
"github.com/moby/buildkit/solver/llbsolver"
"github.com/moby/buildkit/solver/pb"
"github.com/moby/buildkit/util/progress/logs"
utilsystem "github.com/moby/buildkit/util/system"
"github.com/moby/buildkit/worker"
digest "github.com/opencontainers/go-digest"
specs "github.com/opencontainers/image-spec/specs-go/v1"
"github.com/opencontainers/runc/libcontainer/system"
"github.com/pkg/errors"
"github.com/sirupsen/logrus"
bolt "go.etcd.io/bbolt"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/status"
)
const execCacheType = "buildkit.exec.v0"
type execOp struct {
op *pb.ExecOp
cm cache.Manager
sm *session.Manager
md *metadata.Store
exec executor.Executor
w worker.Worker
platform *pb.Platform
numInputs int
cacheMounts map[string]*cacheRefShare
cacheMountsMu sync.Mutex
}
func NewExecOp(v solver.Vertex, op *pb.Op_Exec, platform *pb.Platform, cm cache.Manager, sm *session.Manager, md *metadata.Store, exec executor.Executor, w worker.Worker) (solver.Op, error) {
if err := llbsolver.ValidateOp(&pb.Op{Op: op}); err != nil {
return nil, err
}
return &execOp{
op: op.Exec,
cm: cm,
sm: sm,
md: md,
exec: exec,
numInputs: len(v.Inputs()),
w: w,
platform: platform,
cacheMounts: map[string]*cacheRefShare{},
}, nil
}
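// cloneExecOp returns a deep copy of the given ExecOp (including Meta,
// ExtraHosts and Mounts) so callers such as CacheMap can normalize fields
// without mutating the original op.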
func cloneExecOp(old *pb.ExecOp) pb.ExecOp {
n := *old
meta := *n.Meta
meta.ExtraHosts = nil
for i := range n.Meta.ExtraHosts {
h := *n.Meta.ExtraHosts[i]
meta.ExtraHosts = append(meta.ExtraHosts, &h)
}
n.Meta = &meta
n.Mounts = nil
for i := range old.Mounts {
m := *old.Mounts[i]
n.Mounts = append(n.Mounts, &m)
}
return n
}
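// CacheMap derives the cache key for this exec op: non-deterministic fields
// (extra host IPs, mount selectors, proxy env) are cleared on a clone, the op
// is marshaled together with the target platform and digested, and per-input
// selectors and content-hash functions are filled in from the mount deps.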
func (e *execOp) CacheMap(ctx context.Context, index int) (*solver.CacheMap, bool, error) {
op := cloneExecOp(e.op)
for i := range op.Meta.ExtraHosts {
h := op.Meta.ExtraHosts[i]
h.IP = ""
op.Meta.ExtraHosts[i] = h
}
for i := range op.Mounts {
op.Mounts[i].Selector = ""
}
op.Meta.ProxyEnv = nil
p := platforms.DefaultSpec()
if e.platform != nil {
p = specs.Platform{
OS: e.platform.OS,
Architecture: e.platform.Architecture,
Variant: e.platform.Variant,
}
}
dt, err := json.Marshal(struct {
Type string
Exec *pb.ExecOp
OS string
Arch string
Variant string `json:",omitempty"`
}{
Type: execCacheType,
Exec: &op,
OS: p.OS,
Arch: p.Architecture,
Variant: p.Variant,
})
if err != nil {
return nil, false, err
}
cm := &solver.CacheMap{
Digest: digest.FromBytes(dt),
Deps: make([]struct {
Selector digest.Digest
ComputeDigestFunc solver.ResultBasedCacheFunc
}, e.numInputs),
}
deps, err := e.getMountDeps()
if err != nil {
return nil, false, err
}
for i, dep := range deps {
if len(dep.Selectors) != 0 {
dgsts := make([][]byte, 0, len(dep.Selectors))
for _, p := range dep.Selectors {
dgsts = append(dgsts, []byte(p))
}
cm.Deps[i].Selector = digest.FromBytes(bytes.Join(dgsts, []byte{0}))
}
if !dep.NoContentBasedHash {
cm.Deps[i].ComputeDigestFunc = llbsolver.NewContentHashFunc(toSelectors(dedupePaths(dep.Selectors)))
}
}
return cm, true, nil
}
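// dedupePaths drops duplicate paths and any path nested under another input
// path, returning the remaining paths sorted, e.g. ["/a", "/a/b", "/c"]
// reduces to ["/a", "/c"].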
func dedupePaths(inp []string) []string {
old := make(map[string]struct{}, len(inp))
for _, p := range inp {
old[p] = struct{}{}
}
paths := make([]string, 0, len(old))
for p1 := range old {
var skip bool
for p2 := range old {
if p1 != p2 && strings.HasPrefix(p1, p2+"/") {
skip = true
break
}
}
if !skip {
paths = append(paths, p1)
}
}
sort.Slice(paths, func(i, j int) bool {
return paths[i] < paths[j]
})
return paths
}
func toSelectors(p []string) []llbsolver.Selector {
sel := make([]llbsolver.Selector, 0, len(p))
for _, p := range p {
sel = append(sel, llbsolver.Selector{Path: p, FollowLinks: true})
}
return sel
}
type dep struct {
Selectors []string
NoContentBasedHash bool
}
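// getMountDeps collects, per input, the selectors used for content-based
// cache hashing, and disables content-based hashing for inputs mounted
// writable (or as the rootfs) with an output, since their contents may change.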
func (e *execOp) getMountDeps() ([]dep, error) {
deps := make([]dep, e.numInputs)
for _, m := range e.op.Mounts {
if m.Input == pb.Empty {
continue
}
if int(m.Input) >= len(deps) {
return nil, errors.Errorf("invalid mountinput %v", m)
}
sel := m.Selector
if sel != "" {
sel = path.Join("/", sel)
deps[m.Input].Selectors = append(deps[m.Input].Selectors, sel)
}
if (!m.Readonly || m.Dest == pb.RootMount) && m.Output != -1 { // exclude read-only rootfs && read-write mounts
deps[m.Input].NoContentBasedHash = true
}
}
return deps, nil
}
func (e *execOp) getRefCacheDir(ctx context.Context, ref cache.ImmutableRef, id string, m *pb.Mount, sharing pb.CacheSharingOpt) (mref cache.MutableRef, err error) {
g := &cacheRefGetter{
locker: &e.cacheMountsMu,
cacheMounts: e.cacheMounts,
cm: e.cm,
md: e.md,
globalCacheRefs: sharedCacheRefs,
name: fmt.Sprintf("cached mount %s from exec %s", m.Dest, strings.Join(e.op.Meta.Args, " ")),
}
return g.getRefCacheDir(ctx, ref, id, sharing)
}
type cacheRefGetter struct {
locker sync.Locker
cacheMounts map[string]*cacheRefShare
cm cache.Manager
md *metadata.Store
globalCacheRefs *cacheRefs
name string
}
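// getRefCacheDir resolves a cache mount to a mutable ref according to its
// sharing mode: SHARED reuses a single ref process-wide, PRIVATE reuses an
// unlocked ref or creates a fresh one, and LOCKED blocks until an existing
// ref is released.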
func (g *cacheRefGetter) getRefCacheDir(ctx context.Context, ref cache.ImmutableRef, id string, sharing pb.CacheSharingOpt) (mref cache.MutableRef, err error) {
key := "cache-dir:" + id
if ref != nil {
key += ":" + ref.ID()
}
mu := g.locker
mu.Lock()
defer mu.Unlock()
if ref, ok := g.cacheMounts[key]; ok {
return ref.clone(), nil
}
defer func() {
if err == nil {
share := &cacheRefShare{MutableRef: mref, refs: map[*cacheRef]struct{}{}}
g.cacheMounts[key] = share
mref = share.clone()
}
}()
switch sharing {
case pb.CacheSharingOpt_SHARED:
return g.globalCacheRefs.get(key, func() (cache.MutableRef, error) {
return g.getRefCacheDirNoCache(ctx, key, ref, id, false)
})
case pb.CacheSharingOpt_PRIVATE:
return g.getRefCacheDirNoCache(ctx, key, ref, id, false)
case pb.CacheSharingOpt_LOCKED:
return g.getRefCacheDirNoCache(ctx, key, ref, id, true)
default:
return nil, errors.Errorf("invalid cache sharing option: %s", sharing.String())
}
}
func (g *cacheRefGetter) getRefCacheDirNoCache(ctx context.Context, key string, ref cache.ImmutableRef, id string, block bool) (cache.MutableRef, error) {
makeMutable := func(ref cache.ImmutableRef) (cache.MutableRef, error) {
return g.cm.New(ctx, ref, cache.WithRecordType(client.UsageRecordTypeCacheMount), cache.WithDescription(g.name), cache.CachePolicyRetain)
}
cacheRefsLocker.Lock(key)
defer cacheRefsLocker.Unlock(key)
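// Try to adopt an existing mutable ref for this key; with blocking (LOCKED
// sharing) enabled, poll every 100ms until a locked ref is released.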
for {
sis, err := g.md.Search(key)
if err != nil {
return nil, err
}
locked := false
for _, si := range sis {
if mRef, err := g.cm.GetMutable(ctx, si.ID()); err == nil {
logrus.Debugf("reusing ref for cache dir: %s", mRef.ID())
return mRef, nil
} else if errors.Is(err, cache.ErrLocked) {
locked = true
}
}
if block && locked {
cacheRefsLocker.Unlock(key)
select {
case <-ctx.Done():
cacheRefsLocker.Lock(key)
return nil, ctx.Err()
case <-time.After(100 * time.Millisecond):
cacheRefsLocker.Lock(key)
}
} else {
break
}
}
mRef, err := makeMutable(ref)
if err != nil {
return nil, err
}
si, _ := g.md.Get(mRef.ID())
v, err := metadata.NewValue(key)
if err != nil {
mRef.Release(context.TODO())
return nil, err
} | }); err != nil {
mRef.Release(context.TODO())
return nil, err
}
return mRef, nil
}
func (e *execOp) getSSHMountable(ctx context.Context, m *pb.Mount) (cache.Mountable, error) {
sessionID := session.FromContext(ctx)
if sessionID == "" {
return nil, errors.New("could not access local files without session")
}
timeoutCtx, cancel := context.WithTimeout(ctx, 5*time.Second)
defer cancel()
caller, err := e.sm.Get(timeoutCtx, sessionID)
if err != nil {
return nil, err
}
if err := sshforward.CheckSSHID(ctx, caller, m.SSHOpt.ID); err != nil {
if m.SSHOpt.Optional {
return nil, nil
}
if st, ok := status.FromError(errors.Cause(err)); ok && st.Code() == codes.Unimplemented {
return nil, errors.Errorf("no SSH key %q forwarded from the client", m.SSHOpt.ID)
}
return nil, err
}
return &sshMount{mount: m, caller: caller, idmap: e.cm.IdentityMapping()}, nil
}
type sshMount struct {
mount *pb.Mount
caller session.Caller
idmap *idtools.IdentityMapping
}
func (sm *sshMount) Mount(ctx context.Context, readonly bool) (snapshot.Mountable, error) {
return &sshMountInstance{sm: sm, idmap: sm.idmap}, nil
}
type sshMountInstance struct {
sm *sshMount
idmap *idtools.IdentityMapping
}
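// Mount forwards the client's SSH agent socket into the build, translating
// the requested uid/gid through the worker's identity mapping when user
// namespaces are in use.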
func (sm *sshMountInstance) Mount() ([]mount.Mount, func() error, error) {
ctx, cancel := context.WithCancel(context.TODO())
uid := int(sm.sm.mount.SSHOpt.Uid)
gid := int(sm.sm.mount.SSHOpt.Gid)
if sm.idmap != nil {
identity, err := sm.idmap.ToHost(idtools.Identity{
UID: uid,
GID: gid,
})
if err != nil {
return nil, nil, err
}
uid = identity.UID
gid = identity.GID
}
sock, cleanup, err := sshforward.MountSSHSocket(ctx, sm.sm.caller, sshforward.SocketOpt{
ID: sm.sm.mount.SSHOpt.ID,
UID: uid,
GID: gid,
Mode: int(sm.sm.mount.SSHOpt.Mode & 0777),
})
if err != nil {
cancel()
return nil, nil, err
}
release := func() error {
var err error
if cleanup != nil {
err = cleanup()
}
cancel()
return err
}
return []mount.Mount{{
Type: "bind",
Source: sock,
Options: []string{"rbind"},
}}, release, nil
}
func (sm *sshMountInstance) IdentityMapping() *idtools.IdentityMapping {
return sm.idmap
}
func (e *execOp) getSecretMountable(ctx context.Context, m *pb.Mount) (cache.Mountable, error) {
if m.SecretOpt == nil {
return nil, errors.Errorf("invalid sercet mount options")
}
sopt := *m.SecretOpt
id := sopt.ID
if id == "" {
return nil, errors.Errorf("secret ID missing from mount options")
}
sessionID := session.FromContext(ctx)
if sessionID == "" {
return nil, errors.New("could not access local files without session")
}
timeoutCtx, cancel := context.WithTimeout(ctx, 5*time.Second)
defer cancel()
caller, err := e.sm.Get(timeoutCtx, sessionID)
if err != nil {
return nil, err
}
dt, err := secrets.GetSecret(ctx, caller, id)
if err != nil {
if errors.Is(err, secrets.ErrNotFound) && m.SecretOpt.Optional {
return nil, nil
}
return nil, err
}
return &secretMount{mount: m, data: dt, idmap: e.cm.IdentityMapping()}, nil
}
type secretMount struct {
mount *pb.Mount
data []byte
idmap *idtools.IdentityMapping
}
func (sm *secretMount) Mount(ctx context.Context, readonly bool) (snapshot.Mountable, error) {
return &secretMountInstance{sm: sm, idmap: sm.idmap}, nil
}
type secretMountInstance struct {
sm *secretMount
root string
idmap *idtools.IdentityMapping
}
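// Mount materializes the secret as a file on a private tmpfs (so it never
// touches persistent storage) and returns a read-only bind mount of it, with
// ownership and mode mapped through the identity mapping.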
func (sm *secretMountInstance) Mount() ([]mount.Mount, func() error, error) {
dir, err := ioutil.TempDir("", "buildkit-secrets")
if err != nil {
return nil, nil, errors.Wrap(err, "failed to create temp dir")
}
cleanupDir := func() error {
return os.RemoveAll(dir)
}
if err := os.Chmod(dir, 0711); err != nil {
cleanupDir()
return nil, nil, err
}
tmpMount := mount.Mount{
Type: "tmpfs",
Source: "tmpfs",
Options: []string{"nodev", "nosuid", "noexec", fmt.Sprintf("uid=%d,gid=%d", os.Geteuid(), os.Getegid())},
}
if system.RunningInUserNS() {
tmpMount.Options = nil
}
if err := mount.All([]mount.Mount{tmpMount}, dir); err != nil {
cleanupDir()
return nil, nil, errors.Wrap(err, "unable to setup secret mount")
}
sm.root = dir
cleanup := func() error {
if err := mount.Unmount(dir, 0); err != nil {
return err
}
return cleanupDir()
}
randID := identity.NewID()
fp := filepath.Join(dir, randID)
if err := ioutil.WriteFile(fp, sm.sm.data, 0600); err != nil {
cleanup()
return nil, nil, err
}
uid := int(sm.sm.mount.SecretOpt.Uid)
gid := int(sm.sm.mount.SecretOpt.Gid)
if sm.idmap != nil {
identity, err := sm.idmap.ToHost(idtools.Identity{
UID: uid,
GID: gid,
})
if err != nil {
cleanup()
return nil, nil, err
}
uid = identity.UID
gid = identity.GID
}
if err := os.Chown(fp, uid, gid); err != nil {
cleanup()
return nil, nil, err
}
if err := os.Chmod(fp, os.FileMode(sm.sm.mount.SecretOpt.Mode&0777)); err != nil {
cleanup()
return nil, nil, err
}
return []mount.Mount{{
Type: "bind",
Source: fp,
Options: []string{"ro", "rbind", "nodev", "nosuid", "noexec"},
}}, cleanup, nil
}
func (sm *secretMountInstance) IdentityMapping() *idtools.IdentityMapping {
return sm.idmap
}
func addDefaultEnvvar(env []string, k, v string) []string {
for _, e := range env {
if strings.HasPrefix(e, k+"=") {
return env
}
}
return append(env, k+"="+v)
}
func (e *execOp) Exec(ctx context.Context, inputs []solver.Result) ([]solver.Result, error) {
var mounts []executor.Mount
var root cache.Mountable
var readonlyRootFS bool
var outputs []cache.Ref
defer func() {
for _, o := range outputs {
if o != nil {
go o.Release(context.TODO())
}
}
}()
// loop over all mounts, fill in mounts, root and outputs
for _, m := range e.op.Mounts {
var mountable cache.Mountable
var ref cache.ImmutableRef
if m.Dest == pb.RootMount && m.MountType != pb.MountType_BIND {
return nil, errors.Errorf("invalid mount type %s for %s", m.MountType.String(), m.Dest)
}
// if mount is based on input validate and load it
if m.Input != pb.Empty {
if int(m.Input) > len(inputs) {
return nil, errors.Errorf("missing input %d", m.Input)
}
inp := inputs[int(m.Input)]
workerRef, ok := inp.Sys().(*worker.WorkerRef)
if !ok {
return nil, errors.Errorf("invalid reference for exec %T", inp.Sys())
}
ref = workerRef.ImmutableRef
mountable = ref
}
makeMutable := func(ref cache.ImmutableRef) (cache.MutableRef, error) {
desc := fmt.Sprintf("mount %s from exec %s", m.Dest, strings.Join(e.op.Meta.Args, " "))
return e.cm.New(ctx, ref, cache.WithDescription(desc))
}
switch m.MountType {
case pb.MountType_BIND:
// if mount creates an output
if m.Output != pb.SkipOutput {
// if it is readonly and not root then the output is the input
if m.Readonly && ref != nil && m.Dest != pb.RootMount {
outputs = append(outputs, ref.Clone())
} else {
// otherwise output and mount is the mutable child
active, err := makeMutable(ref)
if err != nil {
return nil, err
}
outputs = append(outputs, active)
mountable = active
}
} else if (!m.Readonly || ref == nil) && m.Dest != pb.RootMount {
// this case is an empty read-only scratch mount without an output; not really useful, but don't error
active, err := makeMutable(ref)
if err != nil {
return nil, err
}
defer active.Release(context.TODO())
mountable = active
}
case pb.MountType_CACHE:
if m.CacheOpt == nil {
return nil, errors.Errorf("missing cache mount options")
}
mRef, err := e.getRefCacheDir(ctx, ref, m.CacheOpt.ID, m, m.CacheOpt.Sharing)
if err != nil {
return nil, err
}
mountable = mRef
defer func() {
go mRef.Release(context.TODO())
}()
if m.Output != pb.SkipOutput && ref != nil {
outputs = append(outputs, ref.Clone())
}
case pb.MountType_TMPFS:
mountable = newTmpfs(e.cm.IdentityMapping())
case pb.MountType_SECRET:
secretMount, err := e.getSecretMountable(ctx, m)
if err != nil {
return nil, err
}
if secretMount == nil {
continue
}
mountable = secretMount
case pb.MountType_SSH:
sshMount, err := e.getSSHMountable(ctx, m)
if err != nil {
return nil, err
}
if sshMount == nil {
continue
}
mountable = sshMount
default:
return nil, errors.Errorf("mount type %s not implemented", m.MountType)
}
// validate that there is a mount
if mountable == nil {
return nil, errors.Errorf("mount %s has no input", m.Dest)
}
// if dest is root we need mutable ref even if there is no output
if m.Dest == pb.RootMount {
root = mountable
readonlyRootFS = m.Readonly
if m.Output == pb.SkipOutput && readonlyRootFS {
active, err := makeMutable(ref)
if err != nil {
return nil, err
}
defer func() {
go active.Release(context.TODO())
}()
root = active
}
} else {
mounts = append(mounts, executor.Mount{Src: mountable, Dest: m.Dest, Readonly: m.Readonly, Selector: m.Selector})
}
}
// sort mounts so parents are mounted first
sort.Slice(mounts, func(i, j int) bool {
return mounts[i].Dest < mounts[j].Dest
})
extraHosts, err := parseExtraHosts(e.op.Meta.ExtraHosts)
if err != nil {
return nil, err
}
meta := executor.Meta{
Args: e.op.Meta.Args,
Env: e.op.Meta.Env,
Cwd: e.op.Meta.Cwd,
User: e.op.Meta.User,
ReadonlyRootFS: readonlyRootFS,
ExtraHosts: extraHosts,
NetMode: e.op.Network,
SecurityMode: e.op.Security,
}
if e.op.Meta.ProxyEnv != nil {
meta.Env = append(meta.Env, proxyEnvList(e.op.Meta.ProxyEnv)...)
}
meta.Env = addDefaultEnvvar(meta.Env, "PATH", utilsystem.DefaultPathEnv)
stdout, stderr := logs.NewLogStreams(ctx, os.Getenv("BUILDKIT_DEBUG_EXEC_OUTPUT") == "1")
defer stdout.Close()
defer stderr.Close()
if err := e.exec.Exec(ctx, meta, root, mounts, nil, stdout, stderr); err != nil {
return nil, errors.Wrapf(err, "executor failed running %v", meta.Args)
}
refs := []solver.Result{}
for i, out := range outputs {
if mutable, ok := out.(cache.MutableRef); ok {
ref, err := mutable.Commit(ctx)
if err != nil {
return nil, errors.Wrapf(err, "error committing %s", mutable.ID())
}
refs = append(refs, worker.NewWorkerRefResult(ref, e.w))
} else {
refs = append(refs, worker.NewWorkerRefResult(out.(cache.ImmutableRef), e.w))
}
outputs[i] = nil
}
return refs, nil
}
func proxyEnvList(p *pb.ProxyEnv) []string {
out := []string{}
if v := p.HttpProxy; v != "" {
out = append(out, "HTTP_PROXY="+v, "http_proxy="+v)
}
if v := p.HttpsProxy; v != "" {
out = append(out, "HTTPS_PROXY="+v, "https_proxy="+v)
}
if v := p.FtpProxy; v != "" {
out = append(out, "FTP_PROXY="+v, "ftp_proxy="+v)
}
if v := p.NoProxy; v != "" {
out = append(out, "NO_PROXY="+v, "no_proxy="+v)
}
return out
}
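// Illustrative example (not in the original source): a ProxyEnv with only
// HttpProxy set yields both the upper- and lower-case variants:
//   proxyEnvList(&pb.ProxyEnv{HttpProxy: "http://proxy:3128"})
//   // -> []string{"HTTP_PROXY=http://proxy:3128", "http_proxy=http://proxy:3128"}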
func newTmpfs(idmap *idtools.IdentityMapping) cache.Mountable {
return &tmpfs{idmap: idmap}
}
type tmpfs struct {
idmap *idtools.IdentityMapping
}
func (f *tmpfs) Mount(ctx context.Context, readonly bool) (snapshot.Mountable, error) {
return &tmpfsMount{readonly: readonly, idmap: f.idmap}, nil
}
type tmpfsMount struct {
readonly bool
idmap *idtools.IdentityMapping
}
func (m *tmpfsMount) Mount() ([]mount.Mount, func() error, error) {
opt := []string{"nosuid"}
if m.readonly {
opt = append(opt, "ro")
}
return []mount.Mount{{
Type: "tmpfs",
Source: "tmpfs",
Options: opt,
}}, func() error { return nil }, nil
}
func (m *tmpfsMount) IdentityMapping() *idtools.IdentityMapping {
return m.idmap
}
var cacheRefsLocker = locker.New()
var sharedCacheRefs = &cacheRefs{}
type cacheRefs struct {
mu sync.Mutex
shares map[string]*cacheRefShare
}
// ClearActiveCacheMounts clears shared cache mounts currently in use.
// Caller needs to hold CacheMountsLocker before calling
func ClearActiveCacheMounts() {
sharedCacheRefs.shares = nil
}
func CacheMountsLocker() sync.Locker {
return &sharedCacheRefs.mu
}
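// Illustrative usage sketch (not in the original source): per the comment on
// ClearActiveCacheMounts, the shared-ref lock must be held while clearing.
func clearActiveCacheMountsLocked() {
	l := CacheMountsLocker()
	l.Lock()
	defer l.Unlock()
	ClearActiveCacheMounts()
}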
func (r *cacheRefs) get(key string, fn func() (cache.MutableRef, error)) (cache.MutableRef, error) {
r.mu.Lock()
defer r.mu.Unlock()
if r.shares == nil {
r.shares = map[string]*cacheRefShare{}
}
share, ok := r.shares[key]
if ok {
return share.clone(), nil
}
mref, err := fn()
if err != nil {
return nil, err
}
share = &cacheRefShare{MutableRef: mref, main: r, key: key, refs: map[*cacheRef]struct{}{}}
r.shares[key] = share
return share.clone(), nil
}
type cacheRefShare struct {
cache.MutableRef
mu sync.Mutex
refs map[*cacheRef]struct{}
main *cacheRefs
key string
}
func (r *cacheRefShare) clone() cache.MutableRef {
cacheRef := &cacheRef{cacheRefShare: r}
if cacheRefCloneHijack != nil {
cacheRefCloneHijack()
}
r.mu.Lock()
r.refs[cacheRef] = struct{}{}
r.mu.Unlock()
return cacheRef
}
func (r *cacheRefShare) release(ctx context.Context) error {
if r.main != nil {
delete(r.main.shares, r.key)
}
return r.MutableRef.Release(ctx)
}
var cacheRefReleaseHijack func()
var cacheRefCloneHijack func()
type cacheRef struct {
*cacheRefShare
}
func (r *cacheRef) Release(ctx context.Context) error {
if r.main != nil {
r.main.mu.Lock()
defer r.main.mu.Unlock()
}
r.mu.Lock()
defer r.mu.Unlock()
delete(r.refs, r)
if len(r.refs) == 0 {
if cacheRefReleaseHijack != nil {
cacheRefReleaseHijack()
}
return r.release(ctx)
}
return nil
}
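// Illustrative sketch (not in the original source) of how the shared cache-ref
// machinery above is meant to be used: get() hands out one clone per caller,
// and the underlying MutableRef is released only when the last clone is
// released. newRef is a hypothetical constructor supplied by the caller.
func withSharedCacheRef(ctx context.Context, key string, newRef func() (cache.MutableRef, error), fn func(cache.MutableRef) error) error {
	ref, err := sharedCacheRefs.get(key, newRef)
	if err != nil {
		return err
	}
	defer ref.Release(ctx) // dropping the last clone releases the underlying ref
	return fn(ref)
}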
func parseExtraHosts(ips []*pb.HostIP) ([]executor.HostIP, error) {
out := make([]executor.HostIP, len(ips))
for i, hip := range ips {
ip := net.ParseIP(hip.IP)
if ip == nil {
return nil, errors.Errorf("failed to parse IP %s", hip.IP)
}
out[i] = executor.HostIP{
IP: ip,
Host: hip.Host,
}
}
return out, nil
}
crypto_setup_server.go
package handshake
import (
"bytes"
"crypto/rand"
"encoding/binary"
"errors"
"io"
"net"
"sync"
"github.com/lucas-clemente/pstream/internal/crypto"
"github.com/lucas-clemente/pstream/internal/protocol"
"github.com/lucas-clemente/pstream/internal/utils"
"github.com/lucas-clemente/pstream/qerr"
)
// QuicCryptoKeyDerivationFunction is used for key derivation
type QuicCryptoKeyDerivationFunction func(forwardSecure bool, sharedSecret, nonces []byte, connID protocol.ConnectionID, chlo []byte, scfg []byte, cert []byte, divNonce []byte, pers protocol.Perspective) (crypto.AEAD, error)
// KeyExchangeFunction is used to make a new KEX
type KeyExchangeFunction func() crypto.KeyExchange
// The CryptoSetupServer handles all things crypto for the Session
type cryptoSetupServer struct {
connID protocol.ConnectionID
remoteAddr net.Addr
scfg *ServerConfig
stkGenerator *CookieGenerator
diversificationNonce []byte
version protocol.VersionNumber
supportedVersions []protocol.VersionNumber
acceptSTKCallback func(net.Addr, *Cookie) bool
nullAEAD crypto.AEAD
secureAEAD crypto.AEAD
forwardSecureAEAD crypto.AEAD
receivedForwardSecurePacket bool
receivedSecurePacket bool
sentSHLO chan struct{} // this channel is closed as soon as the SHLO has been written
aeadChanged chan<- protocol.EncryptionLevel
keyDerivation QuicCryptoKeyDerivationFunction
keyExchange KeyExchangeFunction
cryptoStream io.ReadWriter
connectionParameters ConnectionParametersManager
mutex sync.RWMutex
}
var _ CryptoSetup = &cryptoSetupServer{}
// ErrHOLExperiment is returned when the client sends the FHL2 tag in the CHLO.
// This is an experiment implemented by Chrome in QUIC 36, which we don't support.
// TODO: remove this when dropping support for QUIC 36
var ErrHOLExperiment = qerr.Error(qerr.InvalidCryptoMessageParameter, "HOL experiment. Unsupported")
// ErrNSTPExperiment is returned when the client sends the NSTP tag in the CHLO.
// This is an experiment implemented by Chrome in QUIC 38, which we don't support at this point.
var ErrNSTPExperiment = qerr.Error(qerr.InvalidCryptoMessageParameter, "NSTP experiment. Unsupported")
// NewCryptoSetup creates a new CryptoSetup instance for a server
func NewCryptoSetup(
connID protocol.ConnectionID,
remoteAddr net.Addr,
version protocol.VersionNumber,
scfg *ServerConfig,
cryptoStream io.ReadWriter,
connectionParametersManager ConnectionParametersManager,
supportedVersions []protocol.VersionNumber,
acceptSTK func(net.Addr, *Cookie) bool,
aeadChanged chan<- protocol.EncryptionLevel,
) (CryptoSetup, error) {
stkGenerator, err := NewCookieGenerator()
if err != nil {
return nil, err
}
return &cryptoSetupServer{
connID: connID,
remoteAddr: remoteAddr,
version: version,
supportedVersions: supportedVersions,
scfg: scfg,
stkGenerator: stkGenerator,
keyDerivation: crypto.DeriveQuicCryptoAESKeys,
keyExchange: getEphermalKEX,
nullAEAD: crypto.NewNullAEAD(protocol.PerspectiveServer, version),
cryptoStream: cryptoStream,
connectionParameters: connectionParametersManager,
acceptSTKCallback: acceptSTK,
sentSHLO: make(chan struct{}),
aeadChanged: aeadChanged,
}, nil
}
// HandleCryptoStream reads and writes messages on the crypto stream
func (h *cryptoSetupServer) HandleCryptoStream() error {
for {
var chloData bytes.Buffer
message, err := ParseHandshakeMessage(io.TeeReader(h.cryptoStream, &chloData))
if err != nil {
return qerr.HandshakeFailed
}
if message.Tag != TagCHLO {
return qerr.InvalidCryptoMessageType
}
utils.Debugf("Got %s", message)
done, err := h.handleMessage(chloData.Bytes(), message.Data)
if err != nil {
return err
}
if done {
return nil
}
}
}
func (h *cryptoSetupServer) handleMessage(chloData []byte, cryptoData map[Tag][]byte) (bool, error) {
if _, isHOLExperiment := cryptoData[TagFHL2]; isHOLExperiment {
return false, ErrHOLExperiment
}
if _, isNSTPExperiment := cryptoData[TagNSTP]; isNSTPExperiment {
return false, ErrNSTPExperiment
}
sniSlice, ok := cryptoData[TagSNI]
if !ok {
return false, qerr.Error(qerr.CryptoMessageParameterNotFound, "SNI required")
}
sni := string(sniSlice)
if sni == "" {
return false, qerr.Error(qerr.CryptoMessageParameterNotFound, "SNI required")
}
// prevent version downgrade attacks
// see https://groups.google.com/a/chromium.org/forum/#!topic/proto-quic/N-de9j63tCk for a discussion and examples
verSlice, ok := cryptoData[TagVER]
if !ok {
return false, qerr.Error(qerr.InvalidCryptoMessageParameter, "client hello missing version tag")
}
if len(verSlice) != 4 {
return false, qerr.Error(qerr.InvalidCryptoMessageParameter, "incorrect version tag")
}
verTag := binary.LittleEndian.Uint32(verSlice)
ver := protocol.VersionTagToNumber(verTag)
// If the client's preferred version is not the version we are currently speaking, then the client went through a version negotiation. In this case, we need to make sure that we actually do not support this version and that it wasn't a downgrade attack.
if ver != h.version && protocol.IsSupportedVersion(h.supportedVersions, ver) {
return false, qerr.Error(qerr.VersionNegotiationMismatch, "Downgrade attack detected")
}
var reply []byte
var err error
certUncompressed, err := h.scfg.certChain.GetLeafCert(sni)
if err != nil {
return false, err
}
if !h.isInchoateCHLO(cryptoData, certUncompressed) {
// We have a CHLO with a proper server config ID, do a 0-RTT handshake
reply, err = h.handleCHLO(sni, chloData, cryptoData)
if err != nil {
return false, err
}
if _, err := h.cryptoStream.Write(reply); err != nil {
return false, err
}
h.aeadChanged <- protocol.EncryptionForwardSecure
close(h.sentSHLO)
return true, nil
}
// We have an inchoate or non-matching CHLO, so we send a rejection
reply, err = h.handleInchoateCHLO(sni, chloData, cryptoData)
if err != nil {
return false, err
}
_, err = h.cryptoStream.Write(reply)
return false, err
}
// Open decrypts a packet, trying the most secure encryption level available first
func (h *cryptoSetupServer) Open(dst, src []byte, packetNumber protocol.PacketNumber, associatedData []byte) ([]byte, protocol.EncryptionLevel, error) {
h.mutex.RLock()
defer h.mutex.RUnlock()
if h.forwardSecureAEAD != nil {
res, err := h.forwardSecureAEAD.Open(dst, src, packetNumber, associatedData)
if err == nil {
if !h.receivedForwardSecurePacket { // this is the first forward secure packet we receive from the client
h.receivedForwardSecurePacket = true
// wait until protocol.EncryptionForwardSecure has been sent on the aeadChanged channel
<-h.sentSHLO
close(h.aeadChanged)
}
return res, protocol.EncryptionForwardSecure, nil
}
if h.receivedForwardSecurePacket {
return nil, protocol.EncryptionUnspecified, err
}
}
if h.secureAEAD != nil {
res, err := h.secureAEAD.Open(dst, src, packetNumber, associatedData)
if err == nil {
h.receivedSecurePacket = true
return res, protocol.EncryptionSecure, nil
}
if h.receivedSecurePacket {
return nil, protocol.EncryptionUnspecified, err
}
}
res, err := h.nullAEAD.Open(dst, src, packetNumber, associatedData)
if err != nil {
return res, protocol.EncryptionUnspecified, err
}
return res, protocol.EncryptionUnencrypted, err
}
func (h *cryptoSetupServer) GetSealer() (protocol.EncryptionLevel, Sealer) {
h.mutex.RLock()
defer h.mutex.RUnlock()
if h.forwardSecureAEAD != nil {
return protocol.EncryptionForwardSecure, h.forwardSecureAEAD
}
return protocol.EncryptionUnencrypted, h.nullAEAD
}
func (h *cryptoSetupServer) GetSealerForCryptoStream() (protocol.EncryptionLevel, Sealer) {
h.mutex.RLock()
defer h.mutex.RUnlock()
if h.secureAEAD != nil {
return protocol.EncryptionSecure, h.secureAEAD
}
return protocol.EncryptionUnencrypted, h.nullAEAD
}
func (h *cryptoSetupServer) GetSealerWithEncryptionLevel(encLevel protocol.EncryptionLevel) (Sealer, error) {
h.mutex.RLock()
defer h.mutex.RUnlock()
switch encLevel {
case protocol.EncryptionUnencrypted:
return h.nullAEAD, nil
case protocol.EncryptionSecure:
if h.secureAEAD == nil {
return nil, errors.New("CryptoSetupServer: no secureAEAD")
}
return h.secureAEAD, nil
case protocol.EncryptionForwardSecure:
if h.forwardSecureAEAD == nil {
return nil, errors.New("CryptoSetupServer: no forwardSecureAEAD")
}
return h.forwardSecureAEAD, nil
}
return nil, errors.New("CryptoSetupServer: no encryption level specified")
}
func (h *cryptoSetupServer) isInchoateCHLO(cryptoData map[Tag][]byte, cert []byte) bool {
if _, ok := cryptoData[TagPUBS]; !ok {
return true
}
scid, ok := cryptoData[TagSCID]
if !ok || !bytes.Equal(h.scfg.ID, scid) {
return true
}
xlctTag, ok := cryptoData[TagXLCT]
if !ok || len(xlctTag) != 8 {
return true
}
xlct := binary.LittleEndian.Uint64(xlctTag)
if crypto.HashCert(cert) != xlct {
return true
}
return !h.acceptSTK(cryptoData[TagSTK])
}
func (h *cryptoSetupServer) acceptSTK(token []byte) bool {
stk, err := h.stkGenerator.DecodeToken(token)
if err != nil {
utils.Debugf("STK invalid: %s", err.Error())
return false
}
return h.acceptSTKCallback(h.remoteAddr, stk)
}
func (h *cryptoSetupServer) handleInchoateCHLO(sni string, chlo []byte, cryptoData map[Tag][]byte) ([]byte, error) {
if len(chlo) < protocol.ClientHelloMinimumSize {
return nil, qerr.Error(qerr.CryptoInvalidValueLength, "CHLO too small")
}
token, err := h.stkGenerator.NewToken(h.remoteAddr)
if err != nil {
return nil, err
}
replyMap := map[Tag][]byte{
TagSCFG: h.scfg.Get(),
TagSTK: token,
TagSVID: []byte("pstream"),
}
if h.acceptSTK(cryptoData[TagSTK]) {
proof, err := h.scfg.Sign(sni, chlo)
if err != nil {
return nil, err
}
commonSetHashes := cryptoData[TagCCS]
cachedCertsHashes := cryptoData[TagCCRT]
certCompressed, err := h.scfg.GetCertsCompressed(sni, commonSetHashes, cachedCertsHashes)
if err != nil {
return nil, err
}
// Token was valid, send more details
replyMap[TagPROF] = proof
replyMap[TagCERT] = certCompressed
}
message := HandshakeMessage{
Tag: TagREJ,
Data: replyMap,
}
var serverReply bytes.Buffer
message.Write(&serverReply)
utils.Debugf("Sending %s", message)
return serverReply.Bytes(), nil
}
func (h *cryptoSetupServer) handleCHLO(sni string, data []byte, cryptoData map[Tag][]byte) ([]byte, error) {
// We have a CHLO matching our server config, we can continue with the 0-RTT handshake
sharedSecret, err := h.scfg.kex.CalculateSharedKey(cryptoData[TagPUBS])
if err != nil {
return nil, err
}
h.mutex.Lock()
defer h.mutex.Unlock()
certUncompressed, err := h.scfg.certChain.GetLeafCert(sni)
if err != nil {
return nil, err
}
serverNonce := make([]byte, 32)
if _, err = rand.Read(serverNonce); err != nil {
return nil, err
}
h.diversificationNonce = make([]byte, 32)
if _, err = rand.Read(h.diversificationNonce); err != nil {
return nil, err
}
clientNonce := cryptoData[TagNONC]
err = h.validateClientNonce(clientNonce)
if err != nil {
return nil, err
}
aead := cryptoData[TagAEAD]
if !bytes.Equal(aead, []byte("AESG")) {
return nil, qerr.Error(qerr.CryptoNoSupport, "Unsupported AEAD or KEXS")
}
kexs := cryptoData[TagKEXS]
if !bytes.Equal(kexs, []byte("C255")) {
return nil, qerr.Error(qerr.CryptoNoSupport, "Unsupported AEAD or KEXS")
}
h.secureAEAD, err = h.keyDerivation(
false,
sharedSecret,
clientNonce,
h.connID,
data,
h.scfg.Get(),
certUncompressed,
h.diversificationNonce,
protocol.PerspectiveServer,
)
if err != nil {
return nil, err
}
h.aeadChanged <- protocol.EncryptionSecure
// Generate a new curve instance to derive the forward secure key
var fsNonce bytes.Buffer
fsNonce.Write(clientNonce)
fsNonce.Write(serverNonce)
ephermalKex := h.keyExchange()
ephermalSharedSecret, err := ephermalKex.CalculateSharedKey(cryptoData[TagPUBS])
if err != nil {
return nil, err
}
h.forwardSecureAEAD, err = h.keyDerivation(
true,
ephermalSharedSecret,
fsNonce.Bytes(),
h.connID,
data,
h.scfg.Get(),
certUncompressed,
nil,
protocol.PerspectiveServer,
)
if err != nil {
return nil, err
}
err = h.connectionParameters.SetFromMap(cryptoData)
if err != nil {
return nil, err
}
replyMap, err := h.connectionParameters.GetHelloMap()
if err != nil {
return nil, err
}
// add crypto parameters
verTag := &bytes.Buffer{}
for _, v := range h.supportedVersions {
utils.LittleEndian.WriteUint32(verTag, protocol.VersionNumberToTag(v))
}
replyMap[TagPUBS] = ephermalKex.PublicKey()
replyMap[TagSNO] = serverNonce
replyMap[TagVER] = verTag.Bytes()
// note that the SHLO *has* to fit into one packet
message := HandshakeMessage{
Tag: TagSHLO,
Data: replyMap,
}
var reply bytes.Buffer
message.Write(&reply)
utils.Debugf("Sending %s", message)
return reply.Bytes(), nil
}
// DiversificationNonce returns the diversification nonce
func (h *cryptoSetupServer) DiversificationNonce() []byte {
return h.diversificationNonce
}
func (h *cryptoSetupServer) SetDiversificationNonce(data []byte) {
panic("not needed for cryptoSetupServer")
}
func (h *cryptoSetupServer) validateClientNonce(nonce []byte) error {
if len(nonce) != 32 {
return qerr.Error(qerr.InvalidCryptoMessageParameter, "invalid client nonce length")
}
if !bytes.Equal(nonce[4:12], h.scfg.obit) {
return qerr.Error(qerr.InvalidCryptoMessageParameter, "OBIT not matching")
}
return nil
}
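// Illustrative wiring sketch (not in the original source): constructing the
// server-side crypto setup. The acceptSTK policy below accepts every cookie,
// which is a placeholder for demonstration only.
func newCryptoSetupExample(
	connID protocol.ConnectionID,
	remoteAddr net.Addr,
	version protocol.VersionNumber,
	scfg *ServerConfig,
	cryptoStream io.ReadWriter,
	params ConnectionParametersManager,
	supportedVersions []protocol.VersionNumber,
) (CryptoSetup, <-chan protocol.EncryptionLevel, error) {
	aeadChanged := make(chan protocol.EncryptionLevel, 2)
	acceptSTK := func(net.Addr, *Cookie) bool { return true } // demo: accept all cookies
	cs, err := NewCryptoSetup(connID, remoteAddr, version, scfg, cryptoStream,
		params, supportedVersions, acceptSTK, aeadChanged)
	if err != nil {
		return nil, nil, err
	}
	return cs, aeadChanged, nil
}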
bisect-perf-regression.py
#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Performance Test Bisect Tool
This script bisects a series of changelists using binary search. It starts at
a bad revision where a performance metric has regressed, and asks for a last
known-good revision. It will then binary search across this revision range by
syncing, building, and running a performance test. If the change is
suspected to occur as a result of WebKit/V8 changes, the script will
further bisect changes to those depots and attempt to narrow down the revision
range.
An example usage (using SVN CLs):
./tools/bisect-perf-regression.py -c\
"out/Release/performance_ui_tests --gtest_filter=ShutdownTest.SimpleUserQuit"\
-g 168222 -b 168232 -m shutdown/simple-user-quit
Be aware that if you're using the git workflow and specify an svn revision,
the script will attempt to find the git SHA1 where svn changes up to that
revision were merged in.
An example usage (using git hashes):
./tools/bisect-perf-regression.py -c\
"out/Release/performance_ui_tests --gtest_filter=ShutdownTest.SimpleUserQuit"\
-g 1f6e67861535121c5c819c16a666f2436c207e7b\
-b b732f23b4f81c382db0b23b9035f3dadc7d925bb\
-m shutdown/simple-user-quit
"""
import errno
import imp
import math
import optparse
import os
import re
import shlex
import shutil
import subprocess
import sys
import threading
import time
import bisect_utils
# The additional repositories that might need to be bisected.
# If the repository has any dependent repositories (such as skia/src, which
# needs skia/include and skia/gyp to be updated), specify them in 'depends'
# so that they're synced appropriately.
# Format is:
# src: path to the working directory.
# recurse: True if this repository will be bisected.
# depends: A list of other repositories that are actually part of the same
# repository in svn.
# svn: Needed for git workflow to resolve hashes to svn revisions.
# from: Parent depot that must be bisected before this is bisected.
DEPOT_DEPS_NAME = {
'chromium' : {
"src" : "src/",
"recurse" : True,
"depends" : None,
"from" : 'cros'
},
'webkit' : {
"src" : "src/third_party/WebKit",
"recurse" : True,
"depends" : None,
"from" : 'chromium'
},
'v8' : {
"src" : "src/v8",
"recurse" : True,
"depends" : None,
"build_with": 'v8_bleeding_edge',
"from" : 'chromium'
},
'v8_bleeding_edge' : {
"src" : "src/v8_bleeding_edge",
"recurse" : False,
"depends" : None,
"svn": "https://v8.googlecode.com/svn/branches/bleeding_edge",
"from" : 'chromium'
},
'skia/src' : {
"src" : "src/third_party/skia/src",
"recurse" : True,
"svn" : "http://skia.googlecode.com/svn/trunk/src",
"depends" : ['skia/include', 'skia/gyp'],
"from" : 'chromium'
},
'skia/include' : {
"src" : "src/third_party/skia/include",
"recurse" : False,
"svn" : "http://skia.googlecode.com/svn/trunk/include",
"depends" : None,
"from" : 'chromium'
},
'skia/gyp' : {
"src" : "src/third_party/skia/gyp",
"recurse" : False,
"svn" : "http://skia.googlecode.com/svn/trunk/gyp",
"depends" : None,
"from" : 'chromium'
}
}
DEPOT_NAMES = DEPOT_DEPS_NAME.keys()
CROS_SDK_PATH = os.path.join('..', 'cros', 'chromite', 'bin', 'cros_sdk')
CROS_VERSION_PATTERN = 'new version number from %s'
CROS_CHROMEOS_PATTERN = 'chromeos-base/chromeos-chrome'
CROS_TEST_KEY_PATH = os.path.join('..', 'cros', 'chromite', 'ssh_keys',
'testing_rsa')
CROS_SCRIPT_KEY_PATH = os.path.join('..', 'cros', 'src', 'scripts',
'mod_for_test_scripts', 'ssh_keys',
'testing_rsa')
def CalculateTruncatedMean(data_set, truncate_percent):
"""Calculates the truncated mean of a set of values.
Args:
data_set: Set of values to use in calculation.
truncate_percent: The % from the upper/lower portions of the data set to
discard, expressed as a value in [0, 1].
Returns:
The truncated mean as a float.
"""
if len(data_set) > 2:
data_set = sorted(data_set)
discard_num_float = len(data_set) * truncate_percent
discard_num_int = int(math.floor(discard_num_float))
kept_weight = len(data_set) - discard_num_float * 2
data_set = data_set[discard_num_int:len(data_set)-discard_num_int]
weight_left = 1.0 - (discard_num_float - discard_num_int)
if weight_left < 1:
# If the % to discard leaves a fractional portion, need to weight those
# values.
unweighted_vals = data_set[1:len(data_set)-1]
weighted_vals = [data_set[0], data_set[len(data_set)-1]]
weighted_vals = [w * weight_left for w in weighted_vals]
data_set = weighted_vals + unweighted_vals
else:
kept_weight = len(data_set)
truncated_mean = reduce(lambda x, y: float(x) + float(y),
data_set) / kept_weight
return truncated_mean
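# Worked example (illustrative, not part of the original script): with
# truncate_percent=0.2 and ten samples, two values are discarded from each
# end, leaving the mean of [3, 4, 5, 6, 7, 8]:
#   CalculateTruncatedMean([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 0.2)  # == 5.5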
def CalculateStandardDeviation(v):
mean = CalculateTruncatedMean(v, 0.0)
variances = [float(x) - mean for x in v]
variances = [x * x for x in variances]
variance = reduce(lambda x, y: float(x) + float(y), variances) / (len(v) - 1)
std_dev = math.sqrt(variance)
return std_dev
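# Worked example (illustrative, not part of the original script): for
# [1, 2, 3, 4, 5] the mean is 3, the squared deviations sum to 10, and the
# sample variance is 10 / (5 - 1) = 2.5:
#   CalculateStandardDeviation([1, 2, 3, 4, 5])  # == sqrt(2.5) ~= 1.58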
def IsStringFloat(string_to_check):
"""Checks whether or not the given string can be converted to a floating
point number.
Args:
string_to_check: Input string to check if it can be converted to a float.
Returns:
True if the string can be converted to a float.
"""
try:
float(string_to_check)
return True
except ValueError:
return False
def IsStringInt(string_to_check):
"""Checks whether or not the given string can be converted to a integer.
Args:
string_to_check: Input string to check if it can be converted to an int.
Returns:
True if the string can be converted to an int.
"""
try:
int(string_to_check)
return True
except ValueError:
return False
def IsWindows():
"""Checks whether or not the script is running on Windows.
Returns:
True if running on Windows.
"""
return os.name == 'nt'
def RunProcess(command, print_output=False):
"""Run an arbitrary command, returning its output and return code.
Args:
command: A list containing the command and args to execute.
print_output: Optional parameter to write output to stdout as it's
being collected.
Returns:
A tuple of the output and return code.
"""
if print_output:
print 'Running: [%s]' % ' '.join(command)
# On Windows, use shell=True to get PATH interpretation.
shell = IsWindows()
proc = subprocess.Popen(command,
shell=shell,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
bufsize=0)
out = ['']
def ReadOutputWhileProcessRuns(stdout, print_output, out):
while True:
line = stdout.readline()
out[0] += line
if line == '':
break
if print_output:
sys.stdout.write(line)
thread = threading.Thread(target=ReadOutputWhileProcessRuns,
args=(proc.stdout, print_output, out))
thread.start()
proc.wait()
thread.join()
return (out[0], proc.returncode)
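# Example usage (illustrative, not part of the original script):
#   output, return_code = RunProcess(['git', '--version'], print_output=True)
#   if return_code:
#     print 'git is not available'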
def RunGit(command):
"""Run a git subcommand, returning its output and return code.
Args:
command: A list containing the args to git.
Returns:
A tuple of the output and return code.
"""
command = ['git'] + command
return RunProcess(command)
def CheckRunGit(command):
"""Run a git subcommand, returning its output and return code. Asserts if
the return code of the call is non-zero.
Args:
command: A list containing the args to git.
Returns:
A tuple of the output and return code.
"""
(output, return_code) = RunGit(command)
assert not return_code, 'An error occurred while running'\
' "git %s"' % ' '.join(command)
return output
def BuildWithMake(threads, targets, print_output):
cmd = ['make', 'BUILDTYPE=Release', '-j%d' % threads] + targets
(output, return_code) = RunProcess(cmd, print_output)
return not return_code
def BuildWithNinja(threads, targets, print_output):
cmd = ['ninja', '-C', os.path.join('out', 'Release'),
'-j%d' % threads] + targets
(output, return_code) = RunProcess(cmd, print_output)
return not return_code
def BuildWithVisualStudio(targets, print_output):
path_to_devenv = os.path.abspath(
os.path.join(os.environ['VS100COMNTOOLS'], '..', 'IDE', 'devenv.com'))
path_to_sln = os.path.join(os.getcwd(), 'chrome', 'chrome.sln')
cmd = [path_to_devenv, '/build', 'Release', path_to_sln]
for t in targets:
cmd.extend(['/Project', t])
(output, return_code) = RunProcess(cmd, print_output)
return not return_code
class Builder(object):
"""Builder is used by the bisect script to build relevant targets and deploy.
"""
def Build(self, depot, opts):
raise NotImplementedError()
class DesktopBuilder(Builder):
"""DesktopBuilder is used to build Chromium on linux/mac/windows."""
def Build(self, depot, opts):
"""Builds chrome and performance_ui_tests using options passed into
the script.
Args:
depot: Current depot being bisected.
opts: The options parsed from the command line.
Returns:
True if build was successful.
"""
targets = ['chrome', 'performance_ui_tests']
threads = 16
if opts.use_goma:
threads = 64
build_success = False
if opts.build_preference == 'make':
build_success = BuildWithMake(threads, targets,
opts.output_buildbot_annotations)
elif opts.build_preference == 'ninja':
if IsWindows():
targets = [t + '.exe' for t in targets]
build_success = BuildWithNinja(threads, targets,
opts.output_buildbot_annotations)
elif opts.build_preference == 'msvs':
assert IsWindows(), 'msvs is only supported on Windows.'
build_success = BuildWithVisualStudio(targets,
opts.output_buildbot_annotations)
else:
assert False, 'No build system defined.'
return build_success
class AndroidBuilder(Builder):
"""AndroidBuilder is used to build on android."""
def InstallAPK(self, opts):
"""Installs apk to device.
Args:
opts: The options parsed from the command line.
Returns:
True if successful.
"""
path_to_tool = os.path.join('build', 'android', 'adb_install_apk.py')
cmd = [path_to_tool, '--apk', 'ContentShell.apk', '--apk_package',
'org.chromium.content_shell_apk', '--release']
(_, return_code) = RunProcess(cmd, opts.output_buildbot_annotations)
return not return_code
def Build(self, depot, opts):
"""Builds the android content shell and other necessary tools using options
passed into the script.
Args:
depot: Current depot being bisected.
opts: The options parsed from the command line.
Returns:
True if build was successful.
"""
targets = ['content_shell_apk', 'forwarder2', 'md5sum']
threads = 16
if opts.use_goma:
threads = 64
build_success = False
if opts.build_preference == 'ninja':
build_success = BuildWithNinja(threads, targets,
opts.output_buildbot_annotations)
else:
assert False, 'No build system defined.'
if build_success:
build_success = self.InstallAPK(opts)
return build_success
class CrosBuilder(Builder):
"""CrosBuilder is used to build and image ChromeOS/Chromium when cros is the
target platform."""
def ImageToTarget(self, opts):
"""Installs latest image to target specified by opts.cros_remote_ip.
Args:
opts: Program options containing cros_board and cros_remote_ip.
Returns:
True if successful.
"""
try:
# Keys will most likely be set to 0640 after wiping the chroot.
os.chmod(CROS_SCRIPT_KEY_PATH, 0600)
os.chmod(CROS_TEST_KEY_PATH, 0600)
cmd = [CROS_SDK_PATH, '--', './bin/cros_image_to_target.py',
'--remote=%s' % opts.cros_remote_ip,
'--board=%s' % opts.cros_board, '--test', '--verbose']
(_, return_code) = RunProcess(cmd, opts.output_buildbot_annotations)
return not return_code
except OSError, e:
return False
def BuildPackages(self, opts, depot):
"""Builds packages for cros.
Args:
opts: Program options containing cros_board.
depot: The depot being bisected.
Returns:
True if successful.
"""
cmd = [CROS_SDK_PATH]
if depot != 'cros':
path_to_chrome = os.path.join(os.getcwd(), '..')
cmd += ['--chrome_root=%s' % path_to_chrome]
cmd += ['--']
if depot != 'cros':
cmd += ['CHROME_ORIGIN=LOCAL_SOURCE']
cmd += ['BUILDTYPE=Release', './build_packages',
'--board=%s' % opts.cros_board]
(_, return_code) = RunProcess(cmd, True)
return not return_code
def BuildImage(self, opts, depot):
"""Builds test image for cros.
Args:
opts: Program options containing cros_board.
depot: The depot being bisected.
Returns:
True if successful.
"""
cmd = [CROS_SDK_PATH]
if depot != 'cros':
path_to_chrome = os.path.join(os.getcwd(), '..')
cmd += ['--chrome_root=%s' % path_to_chrome]
cmd += ['--']
if depot != 'cros':
cmd += ['CHROME_ORIGIN=LOCAL_SOURCE']
cmd += ['BUILDTYPE=Release', '--', './build_image',
'--board=%s' % opts.cros_board, 'test']
(_, return_code) = RunProcess(cmd, opts.output_buildbot_annotations)
return not return_code
def Build(self, depot, opts):
"""Builds targets using options passed into the script.
Args:
depot: Current depot being bisected.
opts: The options parsed from the command line.
Returns:
True if build was successful.
"""
if self.BuildPackages(opts, depot):
if self.BuildImage(opts, depot):
return self.ImageToTarget(opts)
return False
class SourceControl(object):
"""SourceControl is an abstraction over the underlying source control
system used for chromium. For now only git is supported, but in the
future, the svn workflow could be added as well."""
def __init__(self):
super(SourceControl, self).__init__()
def SyncToRevisionWithGClient(self, revision):
"""Uses gclient to sync to the specified revision.
ie. gclient sync --revision <revision>
Args:
revision: The git SHA1 or svn CL (depending on workflow).
Returns:
The return code of the call.
"""
return bisect_utils.RunGClient(['sync', '--revision',
revision, '--verbose'])
def SyncToRevisionWithRepo(self, timestamp):
"""Uses repo to sync all the underlying git depots to the specified
time.
Args:
timestamp: The unix timestamp to sync to.
Returns:
The return code of the call.
"""
return bisect_utils.RunRepoSyncAtTimestamp(timestamp)
class GitSourceControl(SourceControl):
"""GitSourceControl is used to query the underlying source control. """
def __init__(self, opts):
super(GitSourceControl, self).__init__()
self.opts = opts
def IsGit(self):
return True
def GetRevisionList(self, revision_range_end, revision_range_start):
"""Retrieves a list of revisions between |revision_range_start| and
|revision_range_end|.
Args:
revision_range_end: The SHA1 for the end of the range.
revision_range_start: The SHA1 for the beginning of the range.
Returns:
A list of the revisions between |revision_range_start| and
|revision_range_end| (inclusive).
"""
revision_range = '%s..%s' % (revision_range_start, revision_range_end)
cmd = ['log', '--format=%H', '-10000', '--first-parent', revision_range]
log_output = CheckRunGit(cmd)
revision_hash_list = log_output.split()
revision_hash_list.append(revision_range_start)
return revision_hash_list
def SyncToRevision(self, revision, sync_client=None):
"""Syncs to the specified revision.
Args:
revision: The revision to sync to.
sync_client: Optional; 'gclient' or 'repo' to sync with that client,
otherwise source control is used directly.
Returns:
True if successful.
"""
if not sync_client:
results = RunGit(['checkout', revision])[1]
elif sync_client == 'gclient':
results = self.SyncToRevisionWithGClient(revision)
elif sync_client == 'repo':
results = self.SyncToRevisionWithRepo(revision)
return not results
def ResolveToRevision(self, revision_to_check, depot, search):
"""If an SVN revision is supplied, try to resolve it to a git SHA1.
Args:
revision_to_check: The user supplied revision string that may need to be
resolved to a git SHA1.
depot: The depot the revision_to_check is from.
search: The number of changelists to try if the first fails to resolve
to a git hash. If the value is negative, the function will search
backwards chronologically, otherwise it will search forward.
Returns:
A string containing a git SHA1 hash, otherwise None.
"""
if depot != 'cros':
if not IsStringInt(revision_to_check):
return revision_to_check
depot_svn = 'svn://svn.chromium.org/chrome/trunk/src'
if depot != 'chromium':
depot_svn = DEPOT_DEPS_NAME[depot]['svn']
svn_revision = int(revision_to_check)
git_revision = None
if search > 0:
search_range = xrange(svn_revision, svn_revision + search, 1)
else:
search_range = xrange(svn_revision, svn_revision + search, -1)
for i in search_range:
svn_pattern = 'git-svn-id: %s@%d' % (depot_svn, i)
cmd = ['log', '--format=%H', '-1', '--grep', svn_pattern,
'origin/master']
(log_output, return_code) = RunGit(cmd)
assert not return_code, 'An error occurred while running'\
' "git %s"' % ' '.join(cmd)
if not return_code:
log_output = log_output.strip()
if log_output:
git_revision = log_output
break
return git_revision
else:
if IsStringInt(revision_to_check):
return int(revision_to_check)
else:
cwd = os.getcwd()
os.chdir(os.path.join(os.getcwd(), 'src', 'third_party',
'chromiumos-overlay'))
pattern = CROS_VERSION_PATTERN % revision_to_check
cmd = ['log', '--format=%ct', '-1', '--grep', pattern]
git_revision = None
log_output = CheckRunGit(cmd)
if log_output:
git_revision = log_output
git_revision = int(log_output.strip())
os.chdir(cwd)
return git_revision
def IsInProperBranch(self):
"""Confirms they're in the master branch for performing the bisection.
This is needed or gclient will fail to sync properly.
Returns:
True if the current branch on src is 'master'
"""
cmd = ['rev-parse', '--abbrev-ref', 'HEAD']
log_output = CheckRunGit(cmd)
log_output = log_output.strip()
return log_output == "master"
def SVNFindRev(self, revision):
"""Maps directly to the 'git svn find-rev' command.
Args:
revision: The git SHA1 to use.
Returns:
An integer changelist #, otherwise None.
"""
cmd = ['svn', 'find-rev', revision]
output = CheckRunGit(cmd)
svn_revision = output.strip()
if IsStringInt(svn_revision):
return int(svn_revision)
return None
def QueryRevisionInfo(self, revision):
"""Gathers information on a particular revision, such as author's name,
email, subject, and date.
Args:
revision: Revision you want to gather information on.
Returns:
A dict in the following format:
{
'author': %s,
'email': %s,
'date': %s,
'subject': %s,
}
"""
commit_info = {}
formats = ['%cN', '%cE', '%s', '%cD']
targets = ['author', 'email', 'subject', 'date']
for i in xrange(len(formats)):
cmd = ['log', '--format=%s' % formats[i], '-1', revision]
output = CheckRunGit(cmd)
commit_info[targets[i]] = output.rstrip()
return commit_info
def CheckoutFileAtRevision(self, file_name, revision):
"""Performs a checkout on a file at the given revision.
Returns:
True if successful.
"""
return not RunGit(['checkout', revision, file_name])[1]
def RevertFileToHead(self, file_name):
"""Unstages a file and returns it to HEAD.
Returns:
True if successful.
"""
# Reset doesn't seem to return 0 on success.
RunGit(['reset', 'HEAD', bisect_utils.FILE_DEPS_GIT])
return not RunGit(['checkout', bisect_utils.FILE_DEPS_GIT])[1]
def QueryFileRevisionHistory(self, filename, revision_start, revision_end):
"""Returns a list of commits that modified this file.
Args:
filename: Name of file.
revision_start: Start of revision range.
revision_end: End of revision range.
Returns:
Returns a list of commits that touched this file.
"""
cmd = ['log', '--format=%H', '%s~1..%s' % (revision_start, revision_end),
filename]
output = CheckRunGit(cmd)
return [o for o in output.split('\n') if o]
class BisectPerformanceMetrics(object):
"""BisectPerformanceMetrics performs a bisection against a list of range
of revisions to narrow down where performance regressions may have
occurred."""
def __init__(self, source_control, opts):
super(BisectPerformanceMetrics, self).__init__()
self.opts = opts
self.source_control = source_control
self.src_cwd = os.getcwd()
self.cros_cwd = os.path.join(os.getcwd(), '..', 'cros')
self.depot_cwd = {}
self.cleanup_commands = []
self.warnings = []
self.builder = None
if opts.target_platform == 'cros':
self.builder = CrosBuilder()
elif opts.target_platform == 'android':
self.builder = AndroidBuilder()
else:
self.builder = DesktopBuilder()
# This always starts true since the script grabs latest first.
self.was_blink = True
for d in DEPOT_NAMES:
# The working directory of each depot is just the path to the depot, but
# since we're already in 'src', we can skip that part.
self.depot_cwd[d] = self.src_cwd + DEPOT_DEPS_NAME[d]['src'][3:]
def PerformCleanup(self):
"""Performs cleanup when script is finished."""
os.chdir(self.src_cwd)
for c in self.cleanup_commands:
if c[0] == 'mv':
shutil.move(c[1], c[2])
else:
assert False, 'Invalid cleanup command.'
def GetRevisionList(self, depot, bad_revision, good_revision):
"""Retrieves a list of all the commits between the bad revision and
last known good revision."""
revision_work_list = []
if depot == 'cros':
revision_range_start = good_revision
revision_range_end = bad_revision
cwd = os.getcwd()
self.ChangeToDepotWorkingDirectory('cros')
# Print the commit timestamps for every commit in the revision time
# range. We'll sort them and bisect by that. There is a remote chance that
# 2 (or more) commits will share the exact same timestamp, but it's
# probably safe to ignore that case.
cmd = ['repo', 'forall', '-c',
'git log --format=%%ct --before=%d --after=%d' % (
revision_range_end, revision_range_start)]
(output, return_code) = RunProcess(cmd)
assert not return_code, 'An error occurred while running'\
' "%s"' % ' '.join(cmd)
os.chdir(cwd)
revision_work_list = list(set(
[int(o) for o in output.split('\n') if IsStringInt(o)]))
revision_work_list = sorted(revision_work_list, reverse=True)
else:
revision_work_list = self.source_control.GetRevisionList(bad_revision,
good_revision)
return revision_work_list
def Get3rdPartyRevisionsFromCurrentRevision(self, depot):
"""Parses the DEPS file to determine WebKit/v8/etc... versions.
Returns:
A dict in the format {depot:revision} if successful, otherwise None.
"""
cwd = os.getcwd()
self.ChangeToDepotWorkingDirectory(depot)
results = {}
if depot == 'chromium':
locals = {'Var': lambda _: locals["vars"][_],
'From': lambda *args: None}
execfile(bisect_utils.FILE_DEPS_GIT, {}, locals)
os.chdir(cwd)
rxp = re.compile(".git@(?P<revision>[a-fA-F0-9]+)")
for d in DEPOT_NAMES:
if DEPOT_DEPS_NAME[d]['recurse'] and\
DEPOT_DEPS_NAME[d]['from'] == depot:
if locals['deps'].has_key(DEPOT_DEPS_NAME[d]['src']):
re_results = rxp.search(locals['deps'][DEPOT_DEPS_NAME[d]['src']])
if re_results:
results[d] = re_results.group('revision')
else:
return None
else:
return None
elif depot == 'cros':
cmd = [CROS_SDK_PATH, '--', 'portageq-%s' % self.opts.cros_board,
'best_visible', '/build/%s' % self.opts.cros_board, 'ebuild',
CROS_CHROMEOS_PATTERN]
(output, return_code) = RunProcess(cmd)
assert not return_code, 'An error occurred while running'\
' "%s"' % ' '.join(cmd)
if len(output) > CROS_CHROMEOS_PATTERN:
output = output[len(CROS_CHROMEOS_PATTERN):]
if len(output) > 1:
output = output.split('_')[0]
if len(output) > 3:
contents = output.split('.')
version = contents[2]
if contents[3] != '0':
warningText = 'Chrome version: %s.%s but using %s.0 to bisect.' %\
(version, contents[3], version)
if not warningText in self.warnings:
self.warnings.append(warningText)
cwd = os.getcwd()
self.ChangeToDepotWorkingDirectory('chromium')
output = CheckRunGit(['log', '-1', '--format=%H',
'[email protected]', '--grep=to %s' % version,
'origin/master'])
os.chdir(cwd)
results['chromium'] = output.strip()
return results
def BuildCurrentRevision(self, depot):
"""Builds chrome and performance_ui_tests on the current revision.
Returns:
True if the build was successful.
"""
if self.opts.debug_ignore_build:
return True
cwd = os.getcwd()
os.chdir(self.src_cwd)
build_success = self.builder.Build(depot, self.opts)
os.chdir(cwd)
return build_success
def RunGClientHooks(self):
"""Runs gclient with runhooks command.
Returns:
True if gclient reports no errors.
"""
if self.opts.debug_ignore_build:
return True
return not bisect_utils.RunGClient(['runhooks'])
def ParseMetricValuesFromOutput(self, metric, text):
"""Parses output from performance_ui_tests and retrieves the results for
a given metric.
Args:
metric: The metric as a list of [<trace>, <value>] strings.
text: The text to parse the metric values from.
Returns:
A list of floating point numbers found.
"""
# Format is: RESULT <graph>: <trace>= <value> <units>
metric_formatted = re.escape('RESULT %s: %s=' % (metric[0], metric[1]))
text_lines = text.split('\n')
values_list = []
for current_line in text_lines:
# Parse the output from the performance test for the metric we're
# interested in.
metric_re = metric_formatted +\
"(\s)*(?P<values>[0-9]+(\.[0-9]*)?)"
metric_re = re.compile(metric_re)
regex_results = metric_re.search(current_line)
if not regex_results is None:
values_list += [regex_results.group('values')]
else:
metric_re = metric_formatted +\
"(\s)*\[(\s)*(?P<values>[0-9,.]+)\]"
metric_re = re.compile(metric_re)
regex_results = metric_re.search(current_line)
if not regex_results is None:
metric_values = regex_results.group('values')
values_list += metric_values.split(',')
values_list = [float(v) for v in values_list if IsStringFloat(v)]
# If the metric is times/t, we need to sum the timings in order to get
# similar regression results to those of the try-bots.
if metric == ['times', 't']:
if values_list:
values_list = [reduce(lambda x, y: float(x) + float(y), values_list)]
return values_list
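# Worked example (illustrative, not part of the original script): with
# metric == ['times', 't'], both output formats below are recognized:
#   "RESULT times: t= 14.9 ms"        -> [14.9]
#   "RESULT times: t= [12.1,13.4] ms" -> [12.1, 13.4]
# and because the metric is times/t, the parsed values are finally summed
# into a single total.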
def RunPerformanceTestAndParseResults(self, command_to_run, metric):
"""Runs a performance test on the current revision by executing the
'command_to_run' and parses the results.
Args:
command_to_run: The command to be run to execute the performance test.
metric: The metric to parse out from the results of the performance test.
Returns:
On success, it will return a tuple of the average value of the metric,
and a success code of 0.
"""
if self.opts.debug_ignore_perf_test:
return ({'mean': 0.0, 'std_dev': 0.0}, 0)
if IsWindows():
command_to_run = command_to_run.replace('/', r'\\')
args = shlex.split(command_to_run)
# If running a telemetry test for cros, insert the remote IP and
# identity parameters.
if self.opts.target_platform == 'cros':
if 'tools/perf/run_' in args[0]:
args.append('--remote=%s' % self.opts.cros_remote_ip)
args.append('--identity=%s' % CROS_TEST_KEY_PATH)
cwd = os.getcwd()
os.chdir(self.src_cwd)
start_time = time.time()
metric_values = []
for i in xrange(self.opts.repeat_test_count):
# Can ignore the return code since if the tests fail, it won't return 0.
(output, return_code) = RunProcess(args,
self.opts.output_buildbot_annotations)
metric_values += self.ParseMetricValuesFromOutput(metric, output)
elapsed_minutes = (time.time() - start_time) / 60.0
if elapsed_minutes >= self.opts.repeat_test_max_time or not metric_values:
break
os.chdir(cwd)
# Need to get the average value if there were multiple values.
if metric_values:
truncated_mean = CalculateTruncatedMean(metric_values,
self.opts.truncate_percent)
standard_dev = CalculateStandardDeviation(metric_values)
values = {
'mean': truncated_mean,
'std_dev': standard_dev,
}
print 'Results of performance test: %12f %12f' % (
truncated_mean, standard_dev)
print
return (values, 0)
else:
return ('Invalid metric specified, or no values returned from '
'performance test.', -1)
def FindAllRevisionsToSync(self, revision, depot):
"""Finds all dependant revisions and depots that need to be synced for a
given revision. This is only useful in the git workflow, as an svn depot
may be split into multiple mirrors.
e.g. skia is broken up into 3 git mirrors over skia/src, skia/gyp, and
skia/include. To sync skia/src properly, one has to find the proper
revisions in skia/gyp and skia/include.
Args:
revision: The revision to sync to.
depot: The depot in use at the moment (probably skia).
Returns:
A list of [depot, revision] pairs that need to be synced.
"""
revisions_to_sync = [[depot, revision]]
is_base = (depot == 'chromium') or (depot == 'cros')
# Some SVN depots were split into multiple git depots, so we need to
# figure out for each mirror which git revision to grab. There's no
guarantee that the SVN revision will exist for each of the dependent
# depots, so we have to grep the git logs and grab the next earlier one.
if not is_base and\
DEPOT_DEPS_NAME[depot]['depends'] and\
self.source_control.IsGit():
svn_rev = self.source_control.SVNFindRev(revision)
for d in DEPOT_DEPS_NAME[depot]['depends']:
self.ChangeToDepotWorkingDirectory(d)
dependant_rev = self.source_control.ResolveToRevision(svn_rev, d, -1000)
if dependant_rev:
revisions_to_sync.append([d, dependant_rev])
num_resolved = len(revisions_to_sync)
num_needed = len(DEPOT_DEPS_NAME[depot]['depends'])
self.ChangeToDepotWorkingDirectory(depot)
if not ((num_resolved - 1) == num_needed):
return None
return revisions_to_sync
def PerformPreBuildCleanup(self):
"""Performs necessary cleanup between runs."""
print 'Cleaning up between runs.'
print
# Having these pyc files around between runs can confuse the
# perf tests and cause them to crash.
for (path, dir, files) in os.walk(self.src_cwd):
for cur_file in files:
if cur_file.endswith('.pyc'):
path_to_file = os.path.join(path, cur_file)
os.remove(path_to_file)
def PerformWebkitDirectoryCleanup(self, revision):
"""If the script is switching between Blink and WebKit during bisect,
it's faster to just delete the directory rather than leave it up to git
to sync.
Returns:
True if successful.
"""
if not self.source_control.CheckoutFileAtRevision(
bisect_utils.FILE_DEPS_GIT, revision):
return False
cwd = os.getcwd()
os.chdir(self.src_cwd)
is_blink = bisect_utils.IsDepsFileBlink()
os.chdir(cwd)
if not self.source_control.RevertFileToHead(
bisect_utils.FILE_DEPS_GIT):
return False
if self.was_blink != is_blink:
self.was_blink = is_blink
return bisect_utils.RemoveThirdPartyWebkitDirectory()
return True
def PerformCrosChrootCleanup(self):
"""Deletes the chroot.
Returns:
True if successful.
"""
cwd = os.getcwd()
self.ChangeToDepotWorkingDirectory('cros')
cmd = [CROS_SDK_PATH, '--delete']
(_, return_code) = RunProcess(cmd, self.opts.output_buildbot_annotations)
os.chdir(cwd)
return not return_code
def CreateCrosChroot(self):
"""Creates a new chroot.
Returns:
True if successful.
"""
cwd = os.getcwd()
self.ChangeToDepotWorkingDirectory('cros')
cmd = [CROS_SDK_PATH, '--create']
(_, return_code) = RunProcess(cmd, self.opts.output_buildbot_annotations)
os.chdir(cwd)
return not return_code
def PerformPreSyncCleanup(self, revision, depot):
"""Performs any necessary cleanup before syncing.
Returns:
True if successful.
"""
if depot == 'chromium':
return self.PerformWebkitDirectoryCleanup(revision)
elif depot == 'cros':
return self.PerformCrosChrootCleanup()
return True
def RunPostSync(self, depot):
"""Performs any work after syncing.
Returns:
True if successful.
"""
if depot == 'chromium':
return self.RunGClientHooks()
elif depot == 'cros':
return self.CreateCrosChroot()
return True
def SyncBuildAndRunRevision(self, revision, depot, command_to_run, metric):
"""Performs a full sync/build/run of the specified revision.
Args:
revision: The revision to sync to.
depot: The depot that's being used at the moment (src, webkit, etc.)
command_to_run: The command to execute the performance test.
metric: The performance metric being tested.
Returns:
On success, a tuple containing the results of the performance test.
Otherwise, a tuple with the error message.
"""
sync_client = None
if depot == 'chromium':
sync_client = 'gclient'
elif depot == 'cros':
sync_client = 'repo'
revisions_to_sync = self.FindAllRevisionsToSync(revision, depot)
if not revisions_to_sync:
return ('Failed to resolve dependent depots.', 1)
if not self.PerformPreSyncCleanup(revision, depot):
return ('Failed to perform pre-sync cleanup.', 1)
success = True
if not self.opts.debug_ignore_sync:
for r in revisions_to_sync:
self.ChangeToDepotWorkingDirectory(r[0])
if sync_client:
self.PerformPreBuildCleanup()
if not self.source_control.SyncToRevision(r[1], sync_client):
success = False
break
if success:
success = self.RunPostSync(depot)
if success:
if self.BuildCurrentRevision(depot):
results = self.RunPerformanceTestAndParseResults(command_to_run,
metric)
if results[1] == 0 and sync_client:
external_revisions = self.Get3rdPartyRevisionsFromCurrentRevision(
depot)
if external_revisions:
return (results[0], results[1], external_revisions)
else:
return ('Failed to parse DEPS file for external revisions.', 1)
else:
return results
else:
return ('Failed to build revision: [%s]' % (str(revision, )), 1)
else:
return ('Failed to run [gclient runhooks].', 1)
else:
return ('Failed to sync revision: [%s]' % (str(revision, )), 1)
def CheckIfRunPassed(self, current_value, known_good_value, known_bad_value):
"""Given known good and bad values, decide if the current_value passed
or failed.
Args:
current_value: The value of the metric being checked.
known_bad_value: The reference value for a "failed" run.
known_good_value: The reference value for a "passed" run.
Returns:
True if the current_value is closer to the known_good_value than the
known_bad_value.
"""
dist_to_good_value = abs(current_value['mean'] - known_good_value['mean'])
dist_to_bad_value = abs(current_value['mean'] - known_bad_value['mean'])
return dist_to_good_value < dist_to_bad_value
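# Illustrative example (not part of the original script): with a known-good
# mean of 100.0 and a known-bad mean of 150.0, a current mean of 110.0 is
# closer to the good reference, so the run is classified as passed:
#   CheckIfRunPassed({'mean': 110.0}, {'mean': 100.0}, {'mean': 150.0})
#   => True (110 is closer to 100 than to 150)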
def ChangeToDepotWorkingDirectory(self, depot_name):
"""Given a depot, changes to the appropriate working directory.
Args:
depot_name: The name of the depot (see DEPOT_NAMES).
"""
if depot_name == 'chromium':
os.chdir(self.src_cwd)
elif depot_name == 'cros':
os.chdir(self.cros_cwd)
elif depot_name in DEPOT_NAMES:
os.chdir(self.depot_cwd[depot_name])
else:
assert False, 'Unknown depot [ %s ] encountered. Possibly a new one'\
' was added without proper support?' %\
(depot_name,)
def PrepareToBisectOnDepot(self,
current_depot,
end_revision,
start_revision):
"""Changes to the appropriate directory and gathers a list of revisions
to bisect between |start_revision| and |end_revision|.
Args:
current_depot: The depot we want to bisect.
end_revision: End of the revision range.
start_revision: Start of the revision range.
Returns:
A list containing the revisions between |start_revision| and
|end_revision| inclusive.
"""
# Change into working directory of external library to run
# subsequent commands.
old_cwd = os.getcwd()
os.chdir(self.depot_cwd[current_depot])
# V8 (and possibly others) is merged in periodically. Bisecting
# this directory directly won't give much good info.
if DEPOT_DEPS_NAME[current_depot].has_key('build_with'):
new_depot = DEPOT_DEPS_NAME[current_depot]['build_with']
svn_start_revision = self.source_control.SVNFindRev(start_revision)
svn_end_revision = self.source_control.SVNFindRev(end_revision)
os.chdir(self.depot_cwd[new_depot])
start_revision = self.source_control.ResolveToRevision(
svn_start_revision, new_depot, -1000)
end_revision = self.source_control.ResolveToRevision(
svn_end_revision, new_depot, -1000)
old_name = DEPOT_DEPS_NAME[current_depot]['src'][4:]
new_name = DEPOT_DEPS_NAME[new_depot]['src'][4:]
os.chdir(self.src_cwd)
shutil.move(old_name, old_name + '.bak')
shutil.move(new_name, old_name)
os.chdir(self.depot_cwd[current_depot])
self.cleanup_commands.append(['mv', old_name, new_name])
self.cleanup_commands.append(['mv', old_name + '.bak', old_name])
os.chdir(self.depot_cwd[current_depot])
depot_revision_list = self.GetRevisionList(current_depot,
end_revision,
start_revision)
os.chdir(old_cwd)
return depot_revision_list
def GatherReferenceValues(self, good_rev, bad_rev, cmd, metric, target_depot):
"""Gathers reference values by running the performance tests on the
known good and bad revisions.
Args:
good_rev: The last known good revision where the performance regression
has not occurred yet.
bad_rev: A revision where the performance regression has already occurred.
cmd: The command to execute the performance test.
metric: The metric being tested for regression.
Returns:
A tuple with the results of building and running each revision.
"""
bad_run_results = self.SyncBuildAndRunRevision(bad_rev,
target_depot,
cmd,
metric)
good_run_results = None
if not bad_run_results[1]:
good_run_results = self.SyncBuildAndRunRevision(good_rev,
target_depot,
cmd,
metric)
return (bad_run_results, good_run_results)
def AddRevisionsIntoRevisionData(self, revisions, depot, sort, revision_data):
"""Adds new revisions to the revision_data dict and initializes them.
Args:
revisions: List of revisions to add.
depot: Depot that's currently in use (src, webkit, etc...)
sort: Sorting key for displaying revisions.
revision_data: A dict to add the new revisions into. Existing revisions
will have their sort keys offset.
"""
num_depot_revisions = len(revisions)
for k, v in revision_data.iteritems():
if v['sort'] > sort:
v['sort'] += num_depot_revisions
for i in xrange(num_depot_revisions):
r = revisions[i]
revision_data[r] = {'revision' : r,
'depot' : depot,
'value' : None,
'passed' : '?',
'sort' : i + sort + 1}
def PrintRevisionsToBisectMessage(self, revision_list, depot):
if self.opts.output_buildbot_annotations:
step_name = 'Bisection Range: [%s - %s]' % (
revision_list[len(revision_list)-1], revision_list[0])
bisect_utils.OutputAnnotationStepStart(step_name)
print
print 'Revisions to bisect on [%s]:' % depot
for revision_id in revision_list:
print ' -> %s' % (revision_id, )
print
if self.opts.output_buildbot_annotations:
bisect_utils.OutputAnnotationStepClosed()
def NudgeRevisionsIfDEPSChange(self, bad_revision, good_revision):
"""Checks to see if changes to DEPS file occurred, and that the revision
range also includes the change to .DEPS.git. If it doesn't, attempts to
expand the revision range to include it.
Args:
bad_revision: First known bad revision.
good_revision: Last known good revision.
Returns:
A tuple with the new bad and good revisions.
"""
if self.source_control.IsGit() and self.opts.target_platform == 'chromium':
changes_to_deps = self.source_control.QueryFileRevisionHistory(
'DEPS', good_revision, bad_revision)
if changes_to_deps:
# DEPS file was changed, search from the oldest change to DEPS file to
# bad_revision to see if there are matching .DEPS.git changes.
oldest_deps_change = changes_to_deps[-1]
changes_to_gitdeps = self.source_control.QueryFileRevisionHistory(
bisect_utils.FILE_DEPS_GIT, oldest_deps_change, bad_revision)
if len(changes_to_deps) != len(changes_to_gitdeps):
# Grab the timestamp of the last DEPS change
cmd = ['log', '--format=%ct', '-1', changes_to_deps[0]]
output = CheckRunGit(cmd)
commit_time = int(output)
# Try looking for a commit that touches the .DEPS.git file in the
# next 15 minutes after the DEPS file change.
cmd = ['log', '--format=%H', '-1',
'--before=%d' % (commit_time + 900), '--after=%d' % commit_time,
'origin/master', bisect_utils.FILE_DEPS_GIT]
output = CheckRunGit(cmd)
output = output.strip()
if output:
self.warnings.append('Detected change to DEPS and modified '
'revision range to include change to .DEPS.git')
return (output, good_revision)
else:
self.warnings.append('Detected change to DEPS but couldn\'t find '
'matching change to .DEPS.git')
return (bad_revision, good_revision)
def Run(self, command_to_run, bad_revision_in, good_revision_in, metric):
"""Given known good and bad revisions, run a binary search on all
intermediate revisions to determine the CL where the performance regression
occurred.
Args:
command_to_run: Specify the command to execute the performance test.
bad_revision_in: Number/tag of the known bad revision.
good_revision_in: Number/tag of the known good revision.
metric: The performance metric to monitor.
Returns:
A dict with 2 members, 'revision_data' and 'error'. On success,
'revision_data' will contain a dict mapping revision ids to
data about that revision. Each piece of revision data consists of a
dict with the following keys:
'passed': Represents whether the performance test was successful at
that revision. Possible values include: 1 (passed), 0 (failed),
'?' (skipped), 'F' (build failed).
'depot': The depot that this revision is from (ie. WebKit)
'external': If the revision is a 'src' revision, 'external' contains
the revisions of each of the external libraries.
'sort': A sort value for sorting the dict in order of commits.
For example:
{
'error':None,
'revision_data':
{
'CL #1':
{
'passed':False,
'depot':'chromium',
'external':None,
'sort':0
}
}
}
If an error occurred, the 'error' field will contain the message and
'revision_data' will be empty.
"""
results = {'revision_data' : {},
'error' : None}
# Choose depot to bisect first
target_depot = 'chromium'
if self.opts.target_platform == 'cros':
target_depot = 'cros'
cwd = os.getcwd()
self.ChangeToDepotWorkingDirectory(target_depot)
# If SVN CLs were passed in, try to match them to git SHA1s.
bad_revision = self.source_control.ResolveToRevision(bad_revision_in,
target_depot, 100)
good_revision = self.source_control.ResolveToRevision(good_revision_in,
target_depot, -100)
os.chdir(cwd)
if bad_revision is None:
results['error'] = 'Couldn\'t resolve [%s] to SHA1.' % (bad_revision_in,)
return results
if good_revision is None:
results['error'] = 'Couldn\'t resolve [%s] to SHA1.' % (good_revision_in,)
return results
(bad_revision, good_revision) = self.NudgeRevisionsIfDEPSChange(
bad_revision, good_revision)
if self.opts.output_buildbot_annotations:
bisect_utils.OutputAnnotationStepStart('Gathering Revisions')
print 'Gathering revision range for bisection.'
# Retrieve a list of revisions to do bisection on.
src_revision_list = self.GetRevisionList(target_depot,
bad_revision,
good_revision)
if self.opts.output_buildbot_annotations:
bisect_utils.OutputAnnotationStepClosed()
if src_revision_list:
# revision_data will store information about a revision such as the
# depot it came from, the webkit/V8 revision at that time,
# performance timing, build state, etc...
revision_data = results['revision_data']
# revision_list is the list we're binary searching through at the moment.
revision_list = []
sort_key_ids = 0
for current_revision_id in src_revision_list:
sort_key_ids += 1
revision_data[current_revision_id] = {'value' : None,
'passed' : '?',
'depot' : target_depot,
'external' : None,
'sort' : sort_key_ids}
revision_list.append(current_revision_id)
min_revision = 0
max_revision = len(revision_list) - 1
self.PrintRevisionsToBisectMessage(revision_list, target_depot)
if self.opts.output_buildbot_annotations:
bisect_utils.OutputAnnotationStepStart('Gathering Reference Values')
print 'Gathering reference values for bisection.'
# Perform the performance tests on the good and bad revisions, to get
# reference values.
(bad_results, good_results) = self.GatherReferenceValues(good_revision,
bad_revision,
command_to_run,
metric,
target_depot)
if self.opts.output_buildbot_annotations:
bisect_utils.OutputAnnotationStepClosed()
if bad_results[1]:
results['error'] = bad_results[0]
return results
if good_results[1]:
results['error'] = good_results[0]
return results
# We need these reference values to determine if later runs should be
# classified as pass or fail.
known_bad_value = bad_results[0]
known_good_value = good_results[0]
# Can just mark the good and bad revisions explicitly here since we
# already know the results.
bad_revision_data = revision_data[revision_list[0]]
bad_revision_data['external'] = bad_results[2]
bad_revision_data['passed'] = 0
bad_revision_data['value'] = known_bad_value
good_revision_data = revision_data[revision_list[max_revision]]
good_revision_data['external'] = good_results[2]
good_revision_data['passed'] = 1
good_revision_data['value'] = known_good_value
while True:
if not revision_list:
break
min_revision_data = revision_data[revision_list[min_revision]]
max_revision_data = revision_data[revision_list[max_revision]]
if max_revision - min_revision <= 1:
if min_revision_data['passed'] == '?':
next_revision_index = min_revision
elif max_revision_data['passed'] == '?':
next_revision_index = max_revision
elif min_revision_data['depot'] == 'chromium' or\
min_revision_data['depot'] == 'cros':
# If there were changes to any of the external libraries we track,
# should bisect the changes there as well.
external_depot = None
for current_depot in DEPOT_NAMES:
if DEPOT_DEPS_NAME[current_depot]["recurse"] and\
DEPOT_DEPS_NAME[current_depot]['from'] ==\
min_revision_data['depot']:
if min_revision_data['external'][current_depot] !=\
max_revision_data['external'][current_depot]:
external_depot = current_depot
break
# If there was no change in any of the external depots, the search
# is over.
if not external_depot:
break
          earliest_revision = max_revision_data['external'][external_depot]
          latest_revision = min_revision_data['external'][external_depot]
new_revision_list = self.PrepareToBisectOnDepot(external_depot,
latest_revision,
earliest_revision)
if not new_revision_list:
            results['error'] = 'An error occurred attempting to retrieve'\
                               ' revision range: [%s..%s]' %\
                               (earliest_revision, latest_revision)
return results
self.AddRevisionsIntoRevisionData(new_revision_list,
external_depot,
min_revision_data['sort'],
revision_data)
# Reset the bisection and perform it on the newly inserted
# changelists.
revision_list = new_revision_list
min_revision = 0
max_revision = len(revision_list) - 1
sort_key_ids += len(revision_list)
          print 'Regression in metric:%s appears to be the result of changes'\
                ' in [%s].' % (metric, external_depot)
self.PrintRevisionsToBisectMessage(revision_list, external_depot)
continue
else:
break
else:
next_revision_index = int((max_revision - min_revision) / 2) +\
min_revision
next_revision_id = revision_list[next_revision_index]
next_revision_data = revision_data[next_revision_id]
next_revision_depot = next_revision_data['depot']
self.ChangeToDepotWorkingDirectory(next_revision_depot)
if self.opts.output_buildbot_annotations:
step_name = 'Working on [%s]' % next_revision_id
bisect_utils.OutputAnnotationStepStart(step_name)
print 'Working on revision: [%s]' % next_revision_id
run_results = self.SyncBuildAndRunRevision(next_revision_id,
next_revision_depot,
command_to_run,
metric)
if self.opts.output_buildbot_annotations:
bisect_utils.OutputAnnotationStepClosed()
# If the build is successful, check whether or not the metric
# had regressed.
if not run_results[1]:
if len(run_results) > 2:
next_revision_data['external'] = run_results[2]
passed_regression = self.CheckIfRunPassed(run_results[0],
known_good_value,
known_bad_value)
next_revision_data['passed'] = passed_regression
next_revision_data['value'] = run_results[0]
if passed_regression:
max_revision = next_revision_index
else:
min_revision = next_revision_index |
# If the build is broken, remove it and redo search.
revision_list.pop(next_revision_index)
max_revision -= 1
else:
# Weren't able to sync and retrieve the revision range.
results['error'] = 'An error occurred attempting to retrieve revision '\
'range: [%s..%s]' % (good_revision, bad_revision)
return results
def FormatAndPrintResults(self, bisect_results):
"""Prints the results from a bisection run in a readable format.
    Args:
bisect_results: The results from a bisection test run.
"""
revision_data = bisect_results['revision_data']
revision_data_sorted = sorted(revision_data.iteritems(),
key = lambda x: x[1]['sort'])
if self.opts.output_buildbot_annotations:
bisect_utils.OutputAnnotationStepStart('Results')
print
print 'Full results of bisection:'
for current_id, current_data in revision_data_sorted:
build_status = current_data['passed']
if type(build_status) is bool:
build_status = int(build_status)
print ' %8s %40s %s' % (current_data['depot'],
current_id, build_status)
print
print
print 'Tested commits:'
for current_id, current_data in revision_data_sorted:
if current_data['value']:
print ' %8s %40s %12f %12f' % (
current_data['depot'], current_id,
current_data['value']['mean'], current_data['value']['std_dev'])
print
# Find range where it possibly broke.
first_working_revision = None
last_broken_revision = None
for k, v in revision_data_sorted:
if v['passed'] == 1:
if not first_working_revision:
first_working_revision = k
if not v['passed']:
last_broken_revision = k
if last_broken_revision != None and first_working_revision != None:
print 'Results: Regression may have occurred in range:'
print ' -> First Bad Revision: [%40s] [%s]' %\
(last_broken_revision,
revision_data[last_broken_revision]['depot'])
print ' -> Last Good Revision: [%40s] [%s]' %\
(first_working_revision,
revision_data[first_working_revision]['depot'])
cwd = os.getcwd()
self.ChangeToDepotWorkingDirectory(
revision_data[last_broken_revision]['depot'])
if revision_data[last_broken_revision]['depot'] == 'cros':
# Want to get a list of all the commits and what depots they belong
# to so that we can grab info about each.
cmd = ['repo', 'forall', '-c',
'pwd ; git log --pretty=oneline --before=%d --after=%d' % (
last_broken_revision, first_working_revision + 1)]
(output, return_code) = RunProcess(cmd)
changes = []
assert not return_code, 'An error occurred while running'\
' "%s"' % ' '.join(cmd)
last_depot = None
cwd = os.getcwd()
for l in output.split('\n'):
if l:
# Output will be in form:
# /path_to_depot
# /path_to_other_depot
# <SHA1>
# /path_again
# <SHA1>
# etc.
if l[0] == '/':
last_depot = l
else:
contents = l.split(' ')
if len(contents) > 1:
changes.append([last_depot, contents[0]])
print
for c in changes:
os.chdir(c[0])
info = self.source_control.QueryRevisionInfo(c[1])
print
print 'Commit : %s' % c[1]
print 'Author : %s' % info['author']
print 'Email : %s' % info['email']
print 'Date : %s' % info['date']
print 'Subject : %s' % info['subject']
print
else:
info = self.source_control.QueryRevisionInfo(last_broken_revision)
print
print 'Commit : %s' % last_broken_revision
print 'Author : %s' % info['author']
print 'Email : %s' % info['email']
print 'Date : %s' % info['date']
print 'Subject : %s' % info['subject']
print
os.chdir(cwd)
# Give a warning if the values were very close together
good_std_dev = revision_data[first_working_revision]['value']['std_dev']
good_mean = revision_data[first_working_revision]['value']['mean']
bad_mean = revision_data[last_broken_revision]['value']['mean']
# A standard deviation of 0 could indicate either insufficient runs
# or a test that consistently returns the same value.
if good_std_dev > 0:
deviations = math.fabs(bad_mean - good_mean) / good_std_dev
if deviations < 1.5:
self.warnings.append('Regression was less than 1.5 standard '
'deviations from "good" value. Results may not be accurate.')
elif self.opts.repeat_test_count == 1:
self.warnings.append('Tests were only set to run once. This '
'may be insufficient to get meaningful results.')
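    # Worked example (illustrative numbers only): with good_mean=100.0,
    # bad_mean=102.0 and good_std_dev=2.0, deviations = |102.0 - 100.0| / 2.0
    # = 1.0, which is below the 1.5 threshold, so the warning above fires.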
# Check for any other possible regression ranges
prev_revision_data = revision_data_sorted[0][1]
prev_revision_id = revision_data_sorted[0][0]
possible_regressions = []
for current_id, current_data in revision_data_sorted:
if current_data['value']:
prev_mean = prev_revision_data['value']['mean']
cur_mean = current_data['value']['mean']
if good_std_dev:
deviations = math.fabs(prev_mean - cur_mean) / good_std_dev
else:
deviations = None
if good_mean:
percent_change = (prev_mean - cur_mean) / good_mean
          # If the "good" values are supposed to be higher than the "bad"
# values (ie. scores), flip the sign of the percent change so that
# a positive value always represents a regression.
if bad_mean < good_mean:
percent_change *= -1.0
else:
percent_change = None
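        # Worked example (illustrative): for a score metric (higher is better,
        # so bad_mean < good_mean), prev_mean=95 on the bad side and
        # cur_mean=100 with good_mean=100 gives (95 - 100) / 100 = -0.05,
        # flipped to +0.05, so a positive value consistently flags a regression.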
if deviations >= 1.5 or percent_change > 0.01:
if current_id != first_working_revision:
possible_regressions.append(
[current_id, prev_revision_id, percent_change, deviations])
prev_revision_data = current_data
prev_revision_id = current_id
if possible_regressions:
print
print 'Other regressions may have occurred:'
print
for p in possible_regressions:
current_id = p[0]
percent_change = p[2]
deviations = p[3]
current_data = revision_data[current_id]
previous_id = p[1]
previous_data = revision_data[previous_id]
if deviations is None:
deviations = 'N/A'
else:
deviations = '%.2f' % deviations
if percent_change is None:
percent_change = 0
print ' %8s %s [%.2f%%, %s x std.dev]' % (
previous_data['depot'], previous_id, 100 * percent_change,
deviations)
print ' %8s %s' % (
current_data['depot'], current_id)
print
if self.warnings:
print
print 'The following warnings were generated:'
print
for w in self.warnings:
print ' - %s' % w
print
if self.opts.output_buildbot_annotations:
bisect_utils.OutputAnnotationStepClosed()
def DetermineAndCreateSourceControl(opts):
"""Attempts to determine the underlying source control workflow and returns
a SourceControl object.
Returns:
An instance of a SourceControl object, or None if the current workflow
is unsupported.
"""
(output, return_code) = RunGit(['rev-parse', '--is-inside-work-tree'])
if output.strip() == 'true':
return GitSourceControl(opts)
return None
def SetNinjaBuildSystemDefault():
"""Makes ninja the default build system to be used by
the bisection script."""
gyp_var = os.getenv('GYP_GENERATORS')
if not gyp_var or not 'ninja' in gyp_var:
if gyp_var:
os.environ['GYP_GENERATORS'] = gyp_var + ',ninja'
else:
os.environ['GYP_GENERATORS'] = 'ninja'
if IsWindows():
os.environ['GYP_DEFINES'] = 'component=shared_library '\
'incremental_chrome_dll=1 disable_nacl=1 fastbuild=1 '\
'chromium_win_pch=0'
def SetMakeBuildSystemDefault():
"""Makes make the default build system to be used by
the bisection script."""
os.environ['GYP_GENERATORS'] = 'make'
def CheckPlatformSupported(opts):
"""Checks that this platform and build system are supported.
Args:
opts: The options parsed from the command line.
Returns:
True if the platform and build system are supported.
"""
# Haven't tested the script out on any other platforms yet.
supported = ['posix', 'nt']
if not os.name in supported:
print "Sorry, this platform isn't supported yet."
print
return False
if IsWindows():
if not opts.build_preference:
opts.build_preference = 'msvs'
if opts.build_preference == 'msvs':
if not os.getenv('VS100COMNTOOLS'):
print 'Error: Path to visual studio could not be determined.'
print
return False
elif opts.build_preference == 'ninja':
SetNinjaBuildSystemDefault()
else:
assert False, 'Error: %s build not supported' % opts.build_preference
else:
if not opts.build_preference:
      if 'ninja' in (os.getenv('GYP_GENERATORS') or ''):
opts.build_preference = 'ninja'
else:
opts.build_preference = 'make'
if opts.build_preference == 'ninja':
SetNinjaBuildSystemDefault()
elif opts.build_preference == 'make':
SetMakeBuildSystemDefault()
    else:
assert False, 'Error: %s build not supported' % opts.build_preference
bisect_utils.RunGClient(['runhooks'])
return True
def RmTreeAndMkDir(path_to_dir):
"""Removes the directory tree specified, and then creates an empty
directory in the same location.
Args:
path_to_dir: Path to the directory tree.
Returns:
True if successful, False if an error occurred.
"""
try:
if os.path.exists(path_to_dir):
shutil.rmtree(path_to_dir)
except OSError, e:
if e.errno != errno.ENOENT:
return False
try:
os.makedirs(path_to_dir)
except OSError, e:
if e.errno != errno.EEXIST:
return False
return True
def RemoveBuildFiles():
"""Removes build files from previous runs."""
if RmTreeAndMkDir(os.path.join('out', 'Release')):
if RmTreeAndMkDir(os.path.join('build', 'Release')):
return True
return False
def main():
usage = ('%prog [options] [-- chromium-options]\n'
'Perform binary search on revision history to find a minimal '
           'range of revisions where a performance metric regressed.\n')
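  # Example invocation (the revisions and metric below are illustrative
  # placeholders, not values from a real regression):
  #
  #   ./tools/bisect-perf-regression.py -c \
  #       "out/Release/performance_ui_tests --gtest_filter=ShutdownTest.SimpleUserQuit" \
  #       -g 168222 -b 168232 -m shutdown/simple-user-quit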
parser = optparse.OptionParser(usage=usage)
parser.add_option('-c', '--command',
type='str',
help='A command to execute your performance test at' +
' each point in the bisection.')
parser.add_option('-b', '--bad_revision',
type='str',
help='A bad revision to start bisection. ' +
'Must be later than good revision. May be either a git' +
' or svn revision.')
parser.add_option('-g', '--good_revision',
type='str',
help='A revision to start bisection where performance' +
' test is known to pass. Must be earlier than the ' +
'bad revision. May be either a git or svn revision.')
parser.add_option('-m', '--metric',
type='str',
help='The desired metric to bisect on. For example ' +
'"vm_rss_final_b/vm_rss_f_b"')
parser.add_option('-w', '--working_directory',
type='str',
help='Path to the working directory where the script will '
'do an initial checkout of the chromium depot. The '
'files will be placed in a subdirectory "bisect" under '
'working_directory and that will be used to perform the '
'bisection. This parameter is optional, if it is not '
'supplied, the script will work from the current depot.')
parser.add_option('-r', '--repeat_test_count',
type='int',
default=20,
help='The number of times to repeat the performance test. '
'Values will be clamped to range [1, 100]. '
'Default value is 20.')
parser.add_option('--repeat_test_max_time',
type='int',
default=20,
help='The maximum time (in minutes) to take running the '
'performance tests. The script will run the performance '
'tests according to --repeat_test_count, so long as it '
'doesn\'t exceed --repeat_test_max_time. Values will be '
                         'clamped to range [1, 60]. '
'Default value is 20.')
parser.add_option('-t', '--truncate_percent',
type='int',
default=25,
help='The highest/lowest % are discarded to form a '
'truncated mean. Values will be clamped to range [0, 25]. '
'Default value is 25 (highest/lowest 25% will be '
'discarded).')
parser.add_option('--build_preference',
type='choice',
choices=['msvs', 'ninja', 'make'],
help='The preferred build system to use. On linux/mac '
'the options are make/ninja. On Windows, the options '
'are msvs/ninja.')
parser.add_option('--target_platform',
type='choice',
choices=['chromium', 'cros', 'android'],
default='chromium',
help='The target platform. Choices are "chromium" (current '
'platform), "cros", or "android". If you specify something '
'other than "chromium", you must be properly set up to '
'build that platform.')
parser.add_option('--cros_board',
type='str',
help='The cros board type to build.')
parser.add_option('--cros_remote_ip',
type='str',
help='The remote machine to image to.')
parser.add_option('--use_goma',
action="store_true",
help='Add a bunch of extra threads for goma.')
parser.add_option('--output_buildbot_annotations',
action="store_true",
help='Add extra annotation output for buildbot.')
parser.add_option('--debug_ignore_build',
action="store_true",
help='DEBUG: Don\'t perform builds.')
parser.add_option('--debug_ignore_sync',
action="store_true",
help='DEBUG: Don\'t perform syncs.')
parser.add_option('--debug_ignore_perf_test',
action="store_true",
help='DEBUG: Don\'t perform performance tests.')
(opts, args) = parser.parse_args()
if not opts.command:
print 'Error: missing required parameter: --command'
print
parser.print_help()
return 1
if not opts.good_revision:
print 'Error: missing required parameter: --good_revision'
print
parser.print_help()
return 1
if not opts.bad_revision:
print 'Error: missing required parameter: --bad_revision'
print
parser.print_help()
return 1
if not opts.metric:
print 'Error: missing required parameter: --metric'
print
parser.print_help()
return 1
if opts.target_platform == 'cros':
# Run sudo up front to make sure credentials are cached for later.
print 'Sudo is required to build cros:'
print
RunProcess(['sudo', 'true'])
if not opts.cros_board:
print 'Error: missing required parameter: --cros_board'
print
parser.print_help()
return 1
if not opts.cros_remote_ip:
print 'Error: missing required parameter: --cros_remote_ip'
print
parser.print_help()
return 1
if not opts.working_directory:
print 'Error: missing required parameter: --working_directory'
print
parser.print_help()
return 1
opts.repeat_test_count = min(max(opts.repeat_test_count, 1), 100)
opts.repeat_test_max_time = min(max(opts.repeat_test_max_time, 1), 60)
opts.truncate_percent = min(max(opts.truncate_percent, 0), 25)
opts.truncate_percent = opts.truncate_percent / 100.0
metric_values = opts.metric.split('/')
if len(metric_values) != 2:
print "Invalid metric specified: [%s]" % (opts.metric,)
print
return 1
if opts.working_directory:
if bisect_utils.CreateBisectDirectoryAndSetupDepot(opts):
return 1
if not bisect_utils.SetupPlatformBuildEnvironment(opts):
print 'Error: Failed to set platform environment.'
print
return 1
os.chdir(os.path.join(os.getcwd(), 'src'))
if not RemoveBuildFiles():
print "Something went wrong removing the build files."
print
return 1
if not CheckPlatformSupported(opts):
return 1
# Check what source control method they're using. Only support git workflow
# at the moment.
source_control = DetermineAndCreateSourceControl(opts)
if not source_control:
print "Sorry, only the git workflow is supported at the moment."
print
return 1
# gClient sync seems to fail if you're not in master branch.
if not source_control.IsInProperBranch() and not opts.debug_ignore_sync:
print "You must switch to master branch to run bisection."
print
return 1
bisect_test = BisectPerformanceMetrics(source_control, opts)
try:
bisect_results = bisect_test.Run(opts.command,
opts.bad_revision,
opts.good_revision,
metric_values)
if not(bisect_results['error']):
bisect_test.FormatAndPrintResults(bisect_results)
finally:
bisect_test.PerformCleanup()
if not(bisect_results['error']):
return 0
else:
print 'Error: ' + bisect_results['error']
print
return 1
if __name__ == '__main__':
sys.exit(main()) | else:
next_revision_data['passed'] = 'F' |
lib.rs | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0 OR MIT
#[no_mangle]
fn main() | {
let arr = [1, 2, 3];
let r: core::ops::Range<usize> = uses_core::foo(&arr[..2]);
let i = uses_std::bar(r);
assert!(i.start == 10);
} |
|
prediction_utils.py | import time
import datetime
import numpy as np
__start_time = time.time()
__end_time = time.time()
def calc_accuracy(predicted_labels, real_labels):
correct_qty = 0
for i in range(len(predicted_labels)):
if predicted_labels[i] == real_labels[i]:
correct_qty += 1
return correct_qty * 100 / len(predicted_labels) |
def predict_labels(pyx):
"""
:param pyx: matrix with probability distribution p(y|x) for every class and *X_test* object
:return: list with predicted class labels
"""
return [np.argmax(row, axis=0) for row in pyx]
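# Example: predict_labels(np.array([[0.1, 0.9], [0.8, 0.2]])) returns [1, 0],
# taking the most probable class per row of p(y|x).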
def convert_time(sec):
return str(datetime.timedelta(seconds=sec)) | |
mod.rs | use std::collections::HashSet;
use std::sync::Arc;
use std::time::{Duration, Instant};
use cadence::prelude::*;
use cadence::{MetricSink, NopMetricSink, StatsdClient};
use failure::Error;
use futures::{Future, future};
use futures::future::join_all;
use hyper::Client;
use hyper::client::HttpConnector;
use hyper_tls::HttpsConnector;
use relative_path::{RelativePath, RelativePathBuf};
use rustsec::db::AdvisoryDatabase;
use semver::VersionReq;
use slog::Logger;
use tokio_service::Service;
mod machines;
mod futures;
use ::utils::cache::Cache;
use ::models::repo::{Repository, RepoPath};
use ::models::crates::{CrateName, CratePath, CrateRelease, AnalyzedDependencies};
use ::interactors::crates::{QueryCrate, GetPopularCrates};
use ::interactors::RetrieveFileAtPath;
use ::interactors::github::GetPopularRepos;
use ::interactors::rustsec::FetchAdvisoryDatabase;
use self::futures::AnalyzeDependenciesFuture;
use self::futures::CrawlManifestFuture;
type HttpClient = Client<HttpsConnector<HttpConnector>>;
#[derive(Clone, Debug)]
pub struct Engine {
client: HttpClient,
logger: Logger,
metrics: StatsdClient,
query_crate: Arc<Cache<QueryCrate<HttpClient>>>,
get_popular_crates: Arc<Cache<GetPopularCrates<HttpClient>>>,
get_popular_repos: Arc<Cache<GetPopularRepos<HttpClient>>>,
retrieve_file_at_path: Arc<RetrieveFileAtPath<HttpClient>>,
fetch_advisory_db: Arc<Cache<FetchAdvisoryDatabase<HttpClient>>>
}
impl Engine {
pub fn new(client: Client<HttpsConnector<HttpConnector>>, logger: Logger) -> Engine {
let metrics = StatsdClient::from_sink("engine", NopMetricSink);
let query_crate = Cache::new(QueryCrate(client.clone()), Duration::from_secs(300), 500);
let get_popular_crates = Cache::new(GetPopularCrates(client.clone()), Duration::from_secs(10), 1);
let get_popular_repos = Cache::new(GetPopularRepos(client.clone()), Duration::from_secs(10), 1);
let fetch_advisory_db = Cache::new(FetchAdvisoryDatabase(client.clone()), Duration::from_secs(300), 1);
Engine {
client: client.clone(), logger, metrics,
query_crate: Arc::new(query_crate),
get_popular_crates: Arc::new(get_popular_crates),
get_popular_repos: Arc::new(get_popular_repos),
retrieve_file_at_path: Arc::new(RetrieveFileAtPath(client)),
fetch_advisory_db: Arc::new(fetch_advisory_db)
}
}
pub fn | <M: MetricSink + Send + Sync + 'static>(&mut self, sink: M) {
self.metrics = StatsdClient::from_sink("engine", sink);
}
}
pub struct AnalyzeDependenciesOutcome {
pub crates: Vec<(CrateName, AnalyzedDependencies)>,
pub duration: Duration
}
impl AnalyzeDependenciesOutcome {
pub fn any_outdated(&self) -> bool {
self.crates.iter().any(|&(_, ref deps)| deps.any_outdated())
}
pub fn any_insecure(&self) -> bool {
self.crates.iter().any(|&(_, ref deps)| deps.count_insecure() > 0)
}
pub fn outdated_ratio(&self) -> (usize, usize) {
self.crates.iter().fold((0, 0), |(outdated, total), &(_, ref deps)| {
(outdated + deps.count_outdated(), total + deps.count_total())
})
}
}
impl Engine {
pub fn get_popular_repos(&self) ->
impl Future<Item=Vec<Repository>, Error=Error>
{
self.get_popular_repos.call(())
.from_err().map(|repos| {
repos.iter()
.filter(|repo| !POPULAR_REPOS_BLACKLIST.contains(&repo.path))
.cloned().collect()
})
}
pub fn get_popular_crates(&self) ->
impl Future<Item=Vec<CratePath>, Error=Error>
{
self.get_popular_crates.call(())
.from_err().map(|crates| crates.clone())
}
pub fn analyze_repo_dependencies(&self, repo_path: RepoPath) ->
impl Future<Item=AnalyzeDependenciesOutcome, Error=Error>
{
let start = Instant::now();
let entry_point = RelativePath::new("/").to_relative_path_buf();
let manifest_future = CrawlManifestFuture::new(self, repo_path.clone(), entry_point);
let engine = self.clone();
manifest_future.and_then(move |manifest_output| {
let engine_for_analyze = engine.clone();
let futures = manifest_output.crates.into_iter().map(move |(crate_name, deps)| {
let analyzed_deps_future = AnalyzeDependenciesFuture::new(engine_for_analyze.clone(), deps);
analyzed_deps_future.map(move |analyzed_deps| (crate_name, analyzed_deps))
});
join_all(futures).and_then(move |crates| {
let duration = start.elapsed();
engine.metrics.time_duration_with_tags("analyze_duration", duration)
.with_tag("repo_site", repo_path.site.as_ref())
.with_tag("repo_qual", repo_path.qual.as_ref())
.with_tag("repo_name", repo_path.name.as_ref())
.send()?;
Ok(AnalyzeDependenciesOutcome {
crates, duration
})
})
})
}
pub fn analyze_crate_dependencies(&self, crate_path: CratePath) ->
impl Future<Item=AnalyzeDependenciesOutcome, Error=Error>
{
let start = Instant::now();
let query_future = self.query_crate.call(crate_path.name.clone()).from_err();
let engine = self.clone();
query_future.and_then(move |query_response| {
match query_response.releases.iter().find(|release| release.version == crate_path.version) {
None => future::Either::A(future::err(format_err!("could not find crate release with version {}", crate_path.version))),
Some(release) => {
let analyzed_deps_future = AnalyzeDependenciesFuture::new(engine.clone(), release.deps.clone());
future::Either::B(analyzed_deps_future.map(move |analyzed_deps| {
let crates = vec![(crate_path.name, analyzed_deps)].into_iter().collect();
let duration = start.elapsed();
AnalyzeDependenciesOutcome {
crates, duration
}
}))
}
}
})
}
pub fn find_latest_crate_release(&self, name: CrateName, req: VersionReq) ->
impl Future<Item=Option<CrateRelease>, Error=Error>
{
self.query_crate.call(name).from_err().map(move |query_response| {
query_response.releases.iter()
.filter(|release| req.matches(&release.version))
.max_by(|r1, r2| r1.version.cmp(&r2.version))
.cloned()
})
}
fn fetch_releases<I: IntoIterator<Item=CrateName>>(&self, names: I) ->
impl Iterator<Item=impl Future<Item=Vec<CrateRelease>, Error=Error>>
{
let engine = self.clone();
names.into_iter().map(move |name| {
engine.query_crate.call(name)
.from_err()
.map(|resp| resp.releases.clone())
})
}
fn retrieve_manifest_at_path(&self, repo_path: &RepoPath, path: &RelativePathBuf) ->
impl Future<Item=String, Error=Error>
{
let manifest_path = path.join(RelativePath::new("Cargo.toml"));
self.retrieve_file_at_path.call((repo_path.clone(), manifest_path))
}
fn fetch_advisory_db(&self) ->
impl Future<Item=Arc<AdvisoryDatabase>, Error=Error>
{
self.fetch_advisory_db.call(()).from_err().map(|db| db.clone())
}
}
lazy_static! {
static ref POPULAR_REPOS_BLACKLIST: HashSet<RepoPath> = {
vec![
RepoPath::from_parts("github", "rust-lang", "rust"),
RepoPath::from_parts("github", "google", "xi-editor"),
RepoPath::from_parts("github", "lk-geimfari", "awesomo"),
RepoPath::from_parts("github", "redox-os", "tfs"),
RepoPath::from_parts("github", "carols10cents", "rustlings"),
RepoPath::from_parts("github", "rust-unofficial", "awesome-rust")
].into_iter().collect::<Result<HashSet<_>, _>>().unwrap()
};
}
| set_metrics |
uptime.go | package metrics
import (
"io/ioutil"
"strconv"
"strings"
"github.com/mackerelio/mackerel-agent/logging"
)
/*
collect uptime
`uptime`: uptime[day] retrieved from /proc/uptime
graph: `uptime`
*/
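// Example: if /proc/uptime reads "350735.47 234388.90", the first field is
// seconds since boot, so the generated value is 350735.47 / 86400 ~= 4.06 days.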
type UptimeGenerator struct {
}
| contentbytes, err := ioutil.ReadFile("/proc/uptime")
if err != nil {
uptimeLogger.Errorf("Failed (skip these metrics): %s", err)
return nil, err
}
content := string(contentbytes)
cols := strings.Split(content, " ")
f, err := strconv.ParseFloat(cols[0], 64)
if err != nil {
uptimeLogger.Errorf("Failed to parse values (skip these metrics): %s", err)
return nil, err
}
return Values(map[string]float64{"uptime": f / 86400}), nil
} | var uptimeLogger = logging.GetLogger("metrics.uptime")
func (g *UptimeGenerator) Generate() (Values, error) { |
detector_test.go | // Copyright The OpenTelemetry Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package lambda
import ( | "context"
"os"
"testing"
"github.com/stretchr/testify/assert"
"go.opentelemetry.io/otel/attribute"
"go.opentelemetry.io/otel/sdk/resource"
semconv "go.opentelemetry.io/otel/semconv/v1.4.0"
)
// successfully return resource when process is running on Amazon Lambda environment
func TestDetectSuccess(t *testing.T) {
os.Clearenv()
_ = os.Setenv(lambdaFunctionNameEnvVar, "testFunction")
_ = os.Setenv(awsRegionEnvVar, "us-texas-1")
_ = os.Setenv(lambdaFunctionVersionEnvVar, "$LATEST")
attributes := []attribute.KeyValue{
semconv.CloudProviderAWS,
semconv.CloudRegionKey.String("us-texas-1"),
semconv.FaaSNameKey.String("testFunction"),
semconv.FaaSVersionKey.String("$LATEST"),
}
expectedResource := resource.NewWithAttributes(semconv.SchemaURL, attributes...)
detector := resourceDetector{}
res, err := detector.Detect(context.Background())
assert.Nil(t, err, "Detector unexpectedly returned error")
assert.Equal(t, expectedResource, res, "Resource returned is incorrect")
}
// return empty resource when not running on lambda
func TestReturnsIfNoEnvVars(t *testing.T) {
os.Clearenv()
detector := resourceDetector{}
res, err := detector.Detect(context.Background())
assert.Equal(t, errNotOnLambda, err)
assert.Equal(t, 0, len(res.Attributes()))
} | |
cache-ui_grpc.pb.go | // Code generated by protoc-gen-go-grpc. DO NOT EDIT.
package v1
import (
context "context"
grpc "google.golang.org/grpc"
codes "google.golang.org/grpc/codes"
status "google.golang.org/grpc/status"
)
// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
// Requires gRPC-Go v1.32.0 or later.
const _ = grpc.SupportPackageIsVersion7
// CacheUIClient is the client API for CacheUI service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream.
type CacheUIClient interface {
// ListEngineSpecs returns a list of Cache Engine(s) that can be started through the UI.
ListEngineSpecs(ctx context.Context, in *ListEngineSpecsRequest, opts ...grpc.CallOption) (CacheUI_ListEngineSpecsClient, error)
// IsReadOnly returns true if the UI is readonly.
IsReadOnly(ctx context.Context, in *IsReadOnlyRequest, opts ...grpc.CallOption) (*IsReadOnlyResponse, error)
}
type cacheUIClient struct {
cc grpc.ClientConnInterface
}
func NewCacheUIClient(cc grpc.ClientConnInterface) CacheUIClient {
return &cacheUIClient{cc}
}
func (c *cacheUIClient) ListEngineSpecs(ctx context.Context, in *ListEngineSpecsRequest, opts ...grpc.CallOption) (CacheUI_ListEngineSpecsClient, error) {
stream, err := c.cc.NewStream(ctx, &CacheUI_ServiceDesc.Streams[0], "/v1.CacheUI/ListEngineSpecs", opts...)
if err != nil {
return nil, err
}
x := &cacheUIListEngineSpecsClient{stream}
if err := x.ClientStream.SendMsg(in); err != nil |
if err := x.ClientStream.CloseSend(); err != nil {
return nil, err
}
return x, nil
}
type CacheUI_ListEngineSpecsClient interface {
Recv() (*ListEngineSpecsResponse, error)
grpc.ClientStream
}
type cacheUIListEngineSpecsClient struct {
grpc.ClientStream
}
func (x *cacheUIListEngineSpecsClient) Recv() (*ListEngineSpecsResponse, error) {
m := new(ListEngineSpecsResponse)
if err := x.ClientStream.RecvMsg(m); err != nil {
return nil, err
}
return m, nil
}
func (c *cacheUIClient) IsReadOnly(ctx context.Context, in *IsReadOnlyRequest, opts ...grpc.CallOption) (*IsReadOnlyResponse, error) {
out := new(IsReadOnlyResponse)
err := c.cc.Invoke(ctx, "/v1.CacheUI/IsReadOnly", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
// CacheUIServer is the server API for CacheUI service.
// All implementations must embed UnimplementedCacheUIServer
// for forward compatibility
type CacheUIServer interface {
// ListEngineSpecs returns a list of Cache Engine(s) that can be started through the UI.
ListEngineSpecs(*ListEngineSpecsRequest, CacheUI_ListEngineSpecsServer) error
// IsReadOnly returns true if the UI is readonly.
IsReadOnly(context.Context, *IsReadOnlyRequest) (*IsReadOnlyResponse, error)
mustEmbedUnimplementedCacheUIServer()
}
// UnimplementedCacheUIServer must be embedded to have forward compatible implementations.
type UnimplementedCacheUIServer struct {
}
func (UnimplementedCacheUIServer) ListEngineSpecs(*ListEngineSpecsRequest, CacheUI_ListEngineSpecsServer) error {
return status.Errorf(codes.Unimplemented, "method ListEngineSpecs not implemented")
}
func (UnimplementedCacheUIServer) IsReadOnly(context.Context, *IsReadOnlyRequest) (*IsReadOnlyResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method IsReadOnly not implemented")
}
func (UnimplementedCacheUIServer) mustEmbedUnimplementedCacheUIServer() {}
// UnsafeCacheUIServer may be embedded to opt out of forward compatibility for this service.
// Use of this interface is not recommended, as added methods to CacheUIServer will
// result in compilation errors.
type UnsafeCacheUIServer interface {
mustEmbedUnimplementedCacheUIServer()
}
func RegisterCacheUIServer(s grpc.ServiceRegistrar, srv CacheUIServer) {
s.RegisterService(&CacheUI_ServiceDesc, srv)
}
func _CacheUI_ListEngineSpecs_Handler(srv interface{}, stream grpc.ServerStream) error {
m := new(ListEngineSpecsRequest)
if err := stream.RecvMsg(m); err != nil {
return err
}
return srv.(CacheUIServer).ListEngineSpecs(m, &cacheUIListEngineSpecsServer{stream})
}
type CacheUI_ListEngineSpecsServer interface {
Send(*ListEngineSpecsResponse) error
grpc.ServerStream
}
type cacheUIListEngineSpecsServer struct {
grpc.ServerStream
}
func (x *cacheUIListEngineSpecsServer) Send(m *ListEngineSpecsResponse) error {
return x.ServerStream.SendMsg(m)
}
func _CacheUI_IsReadOnly_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(IsReadOnlyRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(CacheUIServer).IsReadOnly(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/v1.CacheUI/IsReadOnly",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(CacheUIServer).IsReadOnly(ctx, req.(*IsReadOnlyRequest))
}
return interceptor(ctx, in, info, handler)
}
// CacheUI_ServiceDesc is the grpc.ServiceDesc for CacheUI service.
// It's only intended for direct use with grpc.RegisterService,
// and not to be introspected or modified (even as a copy)
var CacheUI_ServiceDesc = grpc.ServiceDesc{
ServiceName: "v1.CacheUI",
HandlerType: (*CacheUIServer)(nil),
Methods: []grpc.MethodDesc{
{
MethodName: "IsReadOnly",
Handler: _CacheUI_IsReadOnly_Handler,
},
},
Streams: []grpc.StreamDesc{
{
StreamName: "ListEngineSpecs",
Handler: _CacheUI_ListEngineSpecs_Handler,
ServerStreams: true,
},
},
Metadata: "cache-ui.proto",
}
| {
return nil, err
} |
test_save.py | """
Module to run tests on arsave
"""
import os
import numpy as np
import pytest
from astropy import units
from astropy.io import fits
from pypeit import specobjs
from pypeit.core import save
from pypeit.tests.tstutils import dummy_fitstbl
from pypeit.spectrographs import util
def data_path(filename):
data_dir = os.path.join(os.path.dirname(__file__), 'files')
return os.path.join(data_dir, filename)
def mk_specobj(flux=5, objid=500):
# specobj
npix = 100
specobj = specobjs.SpecObj((100,100), 0, (0.4,0.6), objtype='science',
spat_pixpos=300)
specobj.boxcar = dict(wave=np.arange(npix)*units.AA, counts=np.ones(npix)*flux)
specobj.optimal = dict(wave=np.arange(npix)*units.AA, counts=np.ones(npix)*flux-0.5)
specobj.objid = objid
specobj.trace_spat = np.arange(npix) / npix
specobj.fwhmfit = np.arange(npix) / npix
# Return
return specobj
def | ():
#settings.dummy_settings()
#fitsdict = arutils.dummy_fitsdict(nfile=1, spectrograph='none', directory=data_path(''))
fitstbl = dummy_fitstbl(directory=data_path(''))
# Kludge
fitstbl.table.remove_column('filename')
fitstbl['filename'] = 'b1.fits.gz'
# Settings
#settings.argflag['run']['directory']['science'] = data_path('')
spectrograph = 'shane_kast_blue'
# Fill with dummy images
dum = np.ones((100,100))
sci_dict = {}
sci_dict[0] = {}
sci_dict[0]['sciframe'] = dum
sci_dict[0]['finalvar'] = dum * 2
sci_dict[0]['finalsky'] = dum + 0.1
sci_dict['meta'] = {}
sci_dict['meta']['vel_corr'] = 0.
sci_dict['meta']['ir_redux'] = False
basename = 'test'
scidx = 5
path = fitstbl['directory'][scidx]
ifile = fitstbl['filename'][scidx]
rawfile = os.path.join(path, ifile)
master_dir = data_path('MF')+'_'+spectrograph
outfile = data_path('') + 'spec2d_{:s}.fits'.format(basename)
# Create a dummy master_key_dict
master_key_dict = dict(frame='', bpm='bpmkey',bias='',arc='',trace='',flat='')
raw_hdr = fits.open(rawfile)[0].header
save.save_2d_images(sci_dict, raw_hdr, spectrograph, master_key_dict, master_dir, outfile)
# Read and test
head0 = fits.getheader(data_path('spec2d_test.fits'))
assert head0['PYPMFDIR'] == master_dir
assert head0['BPMMKEY'] == 'bpm' # See save_2d_images; removes last 3 characters
assert 'PYPEIT' in head0['PIPELINE']
def test_save1d_fits():
""" save1d to FITS and HDF5
"""
# Init
fitstbl = dummy_fitstbl(spectro_name='shane_kast_blue', directory=data_path(''))
sobj = mk_specobj()
specObjs = specobjs.SpecObjs([sobj])
spectrograph = util.load_spectrograph('shane_kast_blue')
# Write to FITS
basename = 'test'
outfile = data_path('') + 'spec1d_{:s}.fits'.format(basename)
save.save_1d_spectra_fits(specObjs, fitstbl[5], spectrograph, outfile)
# NEEDS REFACTORING
#def test_save1d_hdf5():
# """ save1d to FITS and HDF5
# """
# # Dummy self
# fitstbl = arsort.dummy_fitstbl(spectrograph='shane_kast_blue', directory=data_path(''))
# slf = arsciexp.dummy_self(fitstbl=fitstbl)
# # specobj
# slf._specobjs = []
# slf._specobjs.append([])
# slf._specobjs[0].append([mk_specobj(objid=455), mk_specobj(flux=3., objid=555)])
# # Write to HDF5
# arsave.save_1d_spectra_hdf5(slf, fitstbl)
| test_save2d_fits |
gestion.component.ts | import { Component } from '@angular/core';
import { AssetsService } from '../../../@core/data/assets.service';
import { AwsTransformService } from '../../../@core/utils/awsTransform.service';
import { LocalDataSource } from 'ng2-smart-table';
@Component({
selector: 'ngx-gestion',
templateUrl: './gestion.component.html',
styleUrls: ['./gestion.component.scss'],
styles: [`
nb-card {
transform: translate3d(0, 0, 0);
}
`],
})
export class GestionComponent {
source: LocalDataSource;
projectAWS: any;
data: any;
project: any;
column: any;
settings = {
add: {
addButtonContent: '<i class="nb-plus"></i>',
createButtonContent: '<i class="nb-checkmark"></i>',
cancelButtonContent: '<i class="nb-close"></i>',
confirmCreate: true,
},
edit: {
editButtonContent: '<i class="nb-edit"></i>',
saveButtonContent: '<i class="nb-checkmark"></i>',
cancelButtonContent: '<i class="nb-close"></i>',
confirmSave: true,
},
delete: {
deleteButtonContent: '<i class="nb-trash"></i>',
confirmDelete: true,
},
columns:{},
}
constructor(private assetsService: AssetsService) {
this.data = [];
}
getOrg(event): void {
this.projectAWS = event;
if (this.projectAWS.Item.id !== undefined) {
this.assetsService.getAssets(this.projectAWS.Item.id.S)
.subscribe(res => {
this.data = AwsTransformService.getNormalArray(res);
this.source = new LocalDataSource(this.data);
});
this.assetsService.getSettings(this.projectAWS.Item.id.S)
.subscribe(res => {
this.settings = {
add: {
addButtonContent: '<i class="nb-plus"></i>',
createButtonContent: '<i class="nb-checkmark"></i>',
cancelButtonContent: '<i class="nb-close"></i>',
confirmCreate: true,
},
edit: {
editButtonContent: '<i class="nb-edit"></i>', | },
delete: {
deleteButtonContent: '<i class="nb-trash"></i>',
confirmDelete: true,
},
columns: AwsTransformService.getColumnTable(res),
};
});
}
}
onCreateConfirm(event): void {
if (event.newData.serial !== '') {
if (window.confirm('Seguro que desea crear el activo Serial: ' + event.newData.serial)) {
this.assetsService.addAsset(event.newData)
.subscribe(res => {
event.confirm.resolve();
});
} else {
event.confirm.reject();
}
} else {
alert('Serial no puede ser vacio');
}
}
onEditConfirm(event): void {
if (window.confirm('Seguro que desea editar el activo Modelo: ' +
event.data.modelo + ' Serial: ' + event.data.serial)) {
this.assetsService.EditAsset(event.newData)
.subscribe(res => {
event.confirm.resolve();
});
} else {
event.confirm.reject();
}
}
} | saveButtonContent: '<i class="nb-checkmark"></i>',
cancelButtonContent: '<i class="nb-close"></i>',
confirmSave: true, |
day8.py | import itertools
from aocd import get_data, submit
DAY = 8
YEAR = 2021
def part1(data: str) -> str:
lines = data.splitlines()
ans = 0
for line in lines:
left, right = line.split('|')
segments = left.split(' ')
code = right.split(' ')
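        # Digits 1, 7, 4 and 8 light 2, 3, 4 and 7 segments respectively --
        # the only segment counts that identify a digit uniquely.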
for item in code:
if len(item) in [2, 3, 4, 7]:
ans += 1
return str(ans)
def part2(data: str) -> str:
lines = data.splitlines()
valids = [set("abcefg"), set("cf"), set("acdeg"), set("acdfg"), set("bcdf"), set("abdfg"), set("abdefg"), set("acf"), set("abcdefg"), set("abcdfg")]
ans = 0
for line in lines:
left, right = line.split('|')
segments = left.strip().split(' ')
code = right.strip().split(' ')
for perm in itertools.permutations("abcdefg"):
mapping = {"abcdefg"[i]: perm[i] for i in range(7)}
ok = True
for index, segment in enumerate(segments):
mapped = set()
for char in segment:
mapped.add(mapping[char])
if mapped not in valids:
ok = False
break
if ok:
decoded = 0
for segment in code:
decoded *= 10
mapped = set()
for char in segment:
mapped.add(mapping[char])
digit = valids.index(mapped)
decoded += digit
ans += decoded
break
return str(ans)
if __name__ == '__main__':
input_data = get_data(day=DAY, year=YEAR)
ans1 = part1(input_data) | #submit(answer=ans1, day=DAY, year=YEAR, part=1)
ans2 = part2(input_data)
submit(answer=ans2, day=DAY, year=YEAR, part=2) | |
fake_manager.go | /**
* Copyright (C) 2015 Red Hat, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
Copyright 2016 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package secret
import (
"k8s.io/api/core/v1"
)
// fakeManager implements the Manager interface for testing purposes. It is a
// no-op stand-in and performs no operations against the apiserver.
type fakeManager struct {
}
func NewFakeManager() Manager {
return &fakeManager{}
}
func (s *fakeManager) GetSecret(namespace, name string) (*v1.Secret, error) {
return nil, nil
}
|
func (s *fakeManager) UnregisterPod(pod *v1.Pod) {
} | func (s *fakeManager) RegisterPod(pod *v1.Pod) {
} |
build.py | import copy
from typing import Any, MutableMapping, MutableSequence, Union
from .data import DataGetValue, Data, BaseData
from .exception import InvalidOperationError
class DataBuilder:
def build_prop(self, data: Union[MutableMapping, MutableSequence], key: Any) -> None:
|
def build(self, data: Any, in_place: bool = True) -> Any:
"""
Cleanup all instances of Data classes, removing if not enabled or replacing by its value.
:param data: the data to mutate
:param in_place: whether to modify the data in-place. If False, data will be duplicated
using copy.deepcopy
:return: the same value passed, mutated, except if it is *Data{enabled=False}*, in this case it returns None.
"""
if not in_place:
data = copy.deepcopy(data)
if isinstance(data, MutableMapping):
keylist = list(data.keys())
for key in keylist:
self.build_prop(data, key)
for item in data.values():
self.build(item)
return data
elif isinstance(data, MutableSequence):
for key in range(len(data) - 1, -1, -1):
self.build_prop(data, key)
for item in data:
self.build(item)
return data
return self.get_value(data)
def get_value(self, data: Any) -> Any:
return DataGetValue(data)
def BuildData(data: Any, in_place: bool = True) -> Any:
"""
Cleanup all instances of Data classes, removing if not enabled or replacing by its value.
:param data: the data to mutate
:param in_place: whether to modify the data in-place. If False, data will be duplicated
using copy.deepcopy
:return: the same value passed, mutated, except if it is *Data{enabled=False}*, in this case it returns None.
"""
return DataBuilder().build(data, in_place=in_place)
| """
Cleanup instances of Data class in Mapping or Sequence.
"""
if isinstance(data[key], BaseData):
if isinstance(data[key], Data):
if not data[key].is_enabled():
del data[key]
else:
data[key] = data[key].get_value()
else:
raise InvalidOperationError('Cannot use BaseData in build') |
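# Minimal usage sketch (Data's constructor arguments are assumed here for
# illustration): given data = {'kept': 1, 'dropped': Data(2, enabled=False)},
# BuildData(data) deletes the disabled entry and unwraps enabled Data values,
# leaving {'kept': 1}.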
main.rs | use std::sync::Arc;
use gtk::prelude::*;
| Ok(api) => Arc::new(api),
Err(err) => panic!("{}", err),
};
let gtk_app = lockbook_desktop_gtk::new_gtk_app(api);
gtk_app.run();
} | fn main() {
let api = match lb::DefaultApi::new() { |
stndata.py | from datetime import datetime, timedelta
from bisect import bisect_left
import numpy.ma as ma
from cdippy.cdippy import CDIPnc, Archive, Realtime, RealtimeXY, Historic
import cdippy.timestamp_utils as tsu
import cdippy.utils as cu
class StnData(CDIPnc):
"""
Returns data and metadata for the specified station.
This class handles the problem that neither the Realtime
nor the Historic .nc file may exist for either data or metadata,
and the number of deployment files is unknown apriori.
It tries to seam the multiple station files together.
"""
max_deployments = 99 # Checks at most this number of deployment nc files
# Commonly requested sets of variables
parameter_vars = ['waveHs', 'waveTp', 'waveDp', 'waveTa']
xyz_vars = ['xyzXDisplacement', 'xyzYDisplacement', 'xyzZDisplacement']
spectrum_vars = [
'waveEnergyDensity', 'waveMeanDirection',
'waveA1Value', 'waveB1Value', 'waveA2Value', 'waveB2Value',
'waveCheckFactor',]
meta_vars = [
'metaStationName',
'metaDeployLatitude', 'metaDeployLongitude', 'metaWaterDepth',
        'metaDeclination']
meta_attributes = [
'wmo_id',
'geospatial_lat_min', 'geospatial_lat_max', 'geospatial_lat_units', 'geospatial_lat_resolution',
'geospatial_lon_min', 'geospatial_lon_max', 'geospatial_lon_units', 'geospatial_lon_resolution',
'geospatial_vertical_min', 'geospatial_vertical_max', 'geospatial_vertical_units', 'geospatial_vertical_resolution',
'time_coverage_start', 'time_coverage_end',
'date_created', 'date_modified' ]
def __init__(cls, stn, data_dir=None, org=None):
cls.nc = None
cls.stn = stn
cls.data_dir = data_dir
cls.org = org
cls.historic = Historic(cls.stn, cls.data_dir, cls.org)
cls.realtime = Realtime(cls.stn, cls.data_dir, cls.org)
if cls.historic and cls.historic.nc :
cls.meta = cls.historic
else:
if cls.realtime and cls.realtime.nc :
cls.meta = cls.realtime
else:
return None
def get_parameters(cls, start=None, end=None, pub_set='public', apply_mask=True, target_records=0):
return cls.get_series(start, end, cls.parameter_vars, pub_set, apply_mask, target_records)
def get_stn_meta(cls):
""" Returns a dict of station meta data using historic or realtime file. """
result = {}
if cls.meta is None:
return result
cls.meta.set_request_info(vrs=cls.meta_vars)
result = cls.meta.get_request()
for attr_name in cls.meta_attributes:
if hasattr(cls.meta.nc, attr_name):
result[attr_name] = getattr(cls.meta.nc, attr_name)
return result
def get_xyz(cls, start=None, end=None, pub_set='public'):
return cls.get_series(start, end, cls.xyz_vars, pub_set)
def get_spectra(cls, start=None, end=None, pub_set='public', apply_mask=True, target_records=0):
return cls.get_series(start, end, cls.spectrum_vars, pub_set, apply_mask, target_records)
def get_series(cls, start=None, end=None, vrs=None, pub_set='public', apply_mask=True, target_records=0):
"""
Returns a dict of data between start and end dates with specified quality.
Use this to get series that may span realtime and historic files.
If end is None, then start is considered a target date.
"""
if vrs is None:
vrs = cls.parameter_vars
prefix = cls.get_var_prefix(vrs[0])
if start is not None and end is None: # Target time
ts_I = cls.get_target_timespan(cu.datetime_to_timestamp(start), target_records, prefix+'Time')
if ts_I[0] is not None:
start = cu.timestamp_to_datetime(ts_I[0])
end = cu.timestamp_to_datetime(ts_I[1])
else:
return None
elif start is None: # Use default 3 days back
start = datetime.utcnow()-timedelta(days=3)
end = datetime.utcnow()
cls.set_request_info(start, end, vrs, pub_set, apply_mask)
if vrs is not None and prefix == 'xyz':
return cls.merge_xyz_request()
else:
return cls.merge_request()
def aggregate_dicts(cls, dict1, dict2):
""" Aggregate the data in two dictionaries. Dict1 has oldest data. """
#- Union the keys to make sure we check each one
ukeys = set(dict1.keys()) | set(dict2.keys())
result = { }
#- Combine the variables
for key in ukeys :
if key in dict2 and key in dict1:
result[key] = ma.concatenate([dict1[key], dict2[key]])
elif key in dict2:
result[key] = dict2[key]
else:
result[key] = dict1[key]
return result
def merge_xyz_request(cls):
""" Merge xyz data from realtime and archive nc files. """
if cls.vrs and cls.vrs[0] == 'xyzData':
cls.vrs = ['xyzXDisplacement','xyzYDisplacement','xyzZDisplacement']
request_timespan = cu.Timespan(cls.start_stamp, cls.end_stamp)
result = {}
def helper(cdip_nc, request_timespan, result):
# Try the next file if it is without xyz data
z = cdip_nc.get_var('xyzZDisplacement')
if z is None:
return result, cls.start_stamp
# Try the next file if start_stamp cannot be calculated
start_stamp = cdip_nc.get_xyz_timestamp(0)
end_stamp = cdip_nc.get_xyz_timestamp(len(z)-1)
if start_stamp is None:
return result, cls.start_stamp
file_timespan = cu.Timespan(start_stamp, end_stamp)
# Add data if request timespan overlaps data timespan
if request_timespan.overlap(file_timespan):
cdip_nc.start_stamp = cls.start_stamp
cdip_nc.end_stamp = cls.end_stamp
cdip_nc.pub_set = cls.pub_set
cdip_nc.apply_mask = cls.apply_mask
cdip_nc.vrs = cls.vrs
tmp_result = cdip_nc.get_request()
result = cls.aggregate_dicts(result, tmp_result)
return result, start_stamp
# First get realtime data if it exists
rt = RealtimeXY(cls.stn)
if rt.nc is not None:
result, start_stamp = helper(rt, request_timespan, result)
# If the request start time is more recent than the realtime
# start time, no need to look in the archives
if cls.start_stamp > start_stamp:
return result
# Second, look in archive files for data
for dep in range(1, cls.max_deployments):
deployment = 'd'+'{:02d}'.format(dep)
ar = Archive(cls.stn, deployment, cls.data_dir, cls.org)
if ar.nc is None:
break
result, start_stamp = helper(ar, request_timespan, result)
# Break if file start stamp is greater than request end stamp
if start_stamp > cls.end_stamp :
break
return result
def merge_request(cls):
""" Returns data for given request across realtime and historic files """
rt = {};
r = cls.realtime
# Note that we are assuming that waveTime will work for every time dim.
if r.nc is not None and r.get_var('waveTime')[0] <= cls.end_stamp:
r.vrs = cls.vrs
r.start_stamp = cls.start_stamp
r.end_stamp = cls.end_stamp
r.pub_set = cls.pub_set
r.apply_mask = cls.apply_mask
rt = r.get_request()
ht = {};
h = cls.historic
if h.nc is not None and h.get_var('waveTime')[-1] >= cls.start_stamp:
h.vrs = cls.vrs
h.start_stamp = cls.start_stamp
h.end_stamp = cls.end_stamp
h.pub_set = cls.pub_set
h.apply_mask = cls.apply_mask
ht = h.get_request()
return cls.aggregate_dicts(ht, rt)
def get_nc_files(cls, types=['realtime','historic','archive']):
""" Returns dict of netcdf4 objects of a station's netcdf files """
result = {}
for type in types:
if type == 'realtime':
rt = Realtime(cls.stn, cls.data_dir, cls.org)
if rt.nc:
result[rt.filename] = rt.nc
if type == 'historic':
ht = Historic(cls.stn, cls.data_dir, cls.org)
if ht.nc:
result[ht.filename] = ht.nc
if type == 'archive':
for dep in range(1,cls.max_deployments):
deployment = 'd'+'{:02d}'.format(dep)
ar = Archive(cls.stn, deployment, cls.data_dir, cls.org)
if ar.nc is None:
break
result[ar.filename] = ar
return result
def | (cls, target_timestamp, n, time_var):
"""
Returns a 2-tuple of timestamps, an interval corresponding to n records to
the right or left of target_timestamp.
Given a time_var (e.g. 'waveTime') and target timestamp, returns a 2-tuple
of timestamps corresponding to i and i+n (n<0 or n>=0) taken from
the realtime and historic nc files. Those timestamps can then be used in
set_request_info().
"""
r_ok = False
if cls.realtime.nc is not None:
r_ok = True
h_ok = False
if cls.historic.nc is not None:
h_ok = True
# Check realtime to find closest index
r_closest_idx = None
if r_ok:
r_stamps = cls.realtime.get_var(time_var)[:]
r_last_idx = len(r_stamps) - 1
i_b = bisect_left(r_stamps, target_timestamp)
# i_b will be possibly one more than the last index
i_b = min(i_b, r_last_idx)
# Target timestamp is exactly equal to a data time
if i_b == r_last_idx or r_stamps[i_b] == target_timestamp:
r_closest_idx = i_b
elif i_b > 0:
r_closest_idx = tsu.get_closest_index(i_b-1, i_b, r_stamps, target_timestamp)
# If closest index not found, check historic
h_closest_idx = None
h_last_idx = None # Let's us know if h_stamps has been loaded
if h_ok and not r_closest_idx:
h_stamps = cls.historic.get_var(time_var)[:]
h_last_idx = len(h_stamps) - 1
i_b = bisect_left(h_stamps, target_timestamp)
i_b = min(i_b, h_last_idx)
# Target timestamp is exactly equal to a data time
if (i_b <= h_last_idx and h_stamps[i_b] == target_timestamp) or i_b == 0:
h_closest_idx = i_b
elif i_b >= h_last_idx: # Target is between the two files
if r_ok:
if abs(h_stamps[h_last_idx]-target_timestamp) < abs(r_stamps[0]-target_timestamp):
h_closest_idx = i_b
else:
r_closest_idx = 0
else: # No realtime file
h_closest_idx = i_b
else: # Within middle of historic stamps
h_closest_idx = tsu.get_closest_index(i_b-1, i_b, h_stamps, target_timestamp)
# Now we have the closest index, find the intervals
if r_closest_idx is not None:
r_interval = tsu.get_interval(r_stamps, r_closest_idx, n)
            # If bound exceeded toward H and H exists, calculate h_interval
if r_interval[2] < 0 and h_ok:
if not h_last_idx:
h_stamps = cls.historic.get_var(time_var)[:]
h_last_idx = len(h_stamps) - 1
h_interval = tsu.get_interval(h_stamps, h_last_idx, n+r_closest_idx+1)
#print("Rx H interval: ", h_interval)
#print("Rx R interval: ", r_interval)
return tsu.combine_intervals(h_interval, r_interval)
else:
return r_interval
elif h_closest_idx is not None:
h_interval = tsu.get_interval(h_stamps, h_closest_idx, n)
            # If bound exceeded toward R and R exists, calculate r_interval
if h_interval[2] > 0 and r_ok:
r_interval = tsu.get_interval(r_stamps, 0, n+h_closest_idx-h_last_idx-1)
#print("Hx H interval: ", h_interval)
#print("Hx R interval: ", r_interval)
return tsu.combine_intervals(h_interval, r_interval)
else:
return h_interval
# If we get to here there's a problem
return (None, None, None)
if __name__ == "__main__":
#- Tests
def t0():
s = StnData('100p1')
d = s.get_stn_meta()
print(d)
def t1():
s = StnData('100p1')
d = s.get_spectra(datetime(2016,8,1), target_records=3)
print(d.keys())
print(d['waveEnergyDensity'].shape)
def t2():
s = StnData('100p1',org='ww3')
d = s.get_series('2016-08-01 00:00:00','2016-08-02 23:59:59',['waveHs'],'public')
print(d)
def t3():
s = StnData('100p1',data_dir='./gdata')
d = s.get_nc_files(['historic','archive','realtime'])
print(d.keys())
def t4():
s = StnData('100p1')
# Across deployments 5 and 6
d = s.get_series('2007-05-30 00:00:00','2007-06-01 23:59:59',['xyzData'],'public')
print(len(d['xyzXDisplacement']))
print(len(d['xyzTime']))
print(d['xyzTime'][0],d['xyzTime'][-1])
def t5():
s = StnData('100p1')
dt = datetime(2010,4,1,0,0)
d = s.get_series(dt, target_records=-4)
print(d)
def t6():
# Mark 1 filter delay set to -999.9
s = StnData('071p1')
end = datetime.utcnow()
end = datetime(1996,1,22,15,57,00)
start = end - timedelta(hours=2)
d = s.get_xyz(start, end)
print("D: "+repr(d))
print("Len: "+repr(len(d['xyzTime'])))
t6()
| get_target_timespan |
easy_casino_learn.py | import numpy as np
import matplotlib.pyplot as plt
from ..easy_casino import Casino
from ..hmm_multinoulli import HMMMultinoulli
hmm = HMMMultinoulli(Casino.A, Casino.PX, Casino.INIT)
# generate sequence
seq_length = 300
batch_size = 500
xs_batch = []
zs_batch = []
for j in range(batch_size): | zs = [casino.z]
for i in range(seq_length - 1):
casino.transition()
xs.append(casino.observe())
zs.append(casino.z)
xs_batch.append(xs)
zs_batch.append(zs)
xs_batch = np.array(xs_batch)
zs_batch = np.array(zs_batch)
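# Both arrays have shape (batch_size, seq_length) == (500, 300); each row is an
# independent casino run, treated as an i.i.d. training sequence for EM below.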
num_hidden_states = len(np.unique(zs_batch))
# learn
hmm.initialize_em(2, 6)  # 2 hidden states (fair/loaded die), 6 observation symbols
for i in range(200):
# learn
print("step", i)
print(hmm.A)
print(hmm.init)
print(hmm.PX)
print()
ll = hmm.learn_em(xs_batch)
print("log likelihood:", ll)
print()
# calculate probabilities
alphas, log_evidence, betas, gammas, etas = hmm.forward_backward(xs_batch[0])
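# alphas[t] are filtered state marginals P(z_t | x_1..t); gammas[t] are smoothed
# marginals P(z_t | x_1..T). Column 1 (plotted below) is assumed to correspond
# to the loaded-die state.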
# plot alphas and gammas
plot_zs = np.array(zs_batch[0])
plot_alphas = alphas[:, 1]
plot_gammas = gammas[:, 1]
plot_xs = np.linspace(1, len(plot_zs), num=len(plot_zs))
plt.figure(figsize=(12, 9))
plt.subplot(2, 1, 1)
plt.title("filtering")
plt.plot(plot_xs, plot_zs, label="z")
plt.plot(plot_xs, plot_alphas, label="P(z) = 1")
plt.legend()
plt.subplot(2, 1, 2)
plt.title("smoothing")
plt.plot(plot_xs, plot_zs, label="z")
plt.plot(plot_xs, plot_gammas, label="P(z) = 1")
plt.legend()
plt.show() | casino = Casino()
xs = [casino.observe()] |
Tabs.tsx | import React from 'react';
import ScrollableTabView, { DefaultTabBar } from 'react-native-scrollable-tab-view';
import styles from './style/';
import TabsProps from './PropsType';
class Tabs extends React.Component<TabsProps, any> {
static defaultProps = {
tabBarPosition: 'top',
animated: true,
swipeable: true,
onChange() {},
onTabClick() {},
underlineColor: '#ddd',
activeUnderlineColor: '#108ee9',
textColor: '#000',
activeTextColor: '#108ee9',
styles: styles,
barStyle: null,
};
static TabPane: any;
activeIndex: number;
constructor(props) {
super(props);
this.activeIndex = 0;
}
onTabClick = ({ i }) => {
const key = this.getKey(i);
const { onTabClick, onChange } = this.props;
if (onTabClick) {
onTabClick(key);
}
if (this.activeIndex !== i) {
if (onChange) {
onChange(key);
}
this.activeIndex = i;
}
}
getContents() {
const { children } = this.props;
const newChildren: any[] = [];
React.Children.forEach(children as any, (child: any, idx) => {
newChildren.push(React.cloneElement(child, {
key: idx,
tabLabel: child.props.tab,
children: child.props.children,
}));
});
return newChildren;
}
getKey(index) {
const children = this.props.children;
let key = '';
React.Children.forEach(children as any, (child: any, idx) => { | if (index === idx) {
key = child.key;
}
});
return key;
}
renderTabBar = () => {
const {
tabBarPosition, underlineColor, activeUnderlineColor, textColor, activeTextColor, styles, barStyle,
} = this.props;
const barBaseStyle = tabBarPosition === 'top' ? styles.barTop : styles.barBottom;
const linePosition = tabBarPosition === 'top' ? {} : { top: -1 };
const underlineStyle = [styles.underline, {
bottom: tabBarPosition === 'top' ? -1 : null,
backgroundColor: activeUnderlineColor,
}, linePosition];
return (
<DefaultTabBar
activeTextColor={activeTextColor}
inactiveTextColor={textColor}
style={[barBaseStyle, { borderColor: underlineColor }, barStyle ]}
textStyle={[styles.text]}
tabStyle={[styles.tab]}
underlineStyle={underlineStyle}
/>
);
}
render() {
const {
tabBarPosition, defaultActiveKey, activeKey, animated, children, swipeable,
} = this.props;
let defaultActiveIndex = 0;
let activeIndex = 0;
React.Children.forEach(children as any, (child: any, idx) => {
if (defaultActiveKey === child.key) {
defaultActiveIndex = idx;
}
if (activeKey === child.key) {
activeIndex = idx;
}
});
this.activeIndex = activeIndex;
return (
<ScrollableTabView
tabBarPosition={tabBarPosition}
scrollWithoutAnimation={!animated}
initialPage={defaultActiveIndex}
page={activeIndex}
        locked={!swipeable}
renderTabBar={this.renderTabBar}
onChangeTab={this.onTabClick}
>
{this.getContents()}
</ScrollableTabView>
);
}
}
export default Tabs; | |
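// Hedged usage sketch -- TabPane is assumed to accept `tab` and `key`, matching
// what getContents()/getKey() read off each child:
//
//   <Tabs defaultActiveKey="t1" onChange={key => console.log(key)}>
//     <Tabs.TabPane tab="First" key="t1">{/* content */}</Tabs.TabPane>
//     <Tabs.TabPane tab="Second" key="t2">{/* content */}</Tabs.TabPane>
//   </Tabs>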
payment.py | # coding: utf-8
# Copyright 2015 Eezee-It
import json
import logging
from hashlib import sha256
import urlparse
from odoo import models, fields, api
from odoo.tools.float_utils import float_compare
from odoo.tools.translate import _
from odoo.addons.payment.models.payment_acquirer import ValidationError
from odoo.addons.payment_sips.controllers.main import SipsController
_logger = logging.getLogger(__name__)
CURRENCY_CODES = {
'EUR': '978',
'USD': '840',
'CHF': '756',
'GBP': '826',
'CAD': '124',
'JPY': '392',
'MXN': '484',
'TRY': '949',
'AUD': '036',
'NZD': '554',
'NOK': '578',
'BRL': '986',
'ARS': '032',
'KHR': '116',
'TWD': '901',
} | _inherit = 'payment.acquirer'
provider = fields.Selection(selection_add=[('sips', 'Sips')])
    sips_merchant_id = fields.Char('SIPS Merchant ID', required_if_provider='sips', groups='base.group_user')
sips_secret = fields.Char('SIPS Secret', size=64, required_if_provider='sips', groups='base.group_user')
def _get_sips_urls(self, environment):
""" Worldline SIPS URLS """
url = {
'prod': 'https://payment-webinit.sips-atos.com/paymentInit',
'test': 'https://payment-webinit.simu.sips-atos.com/paymentInit', }
return {'sips_form_url': url.get(environment, url['test']), }
def _sips_generate_shasign(self, values):
""" Generate the shasign for incoming or outgoing communications.
:param dict values: transaction values
:return string: shasign
"""
if self.provider != 'sips':
raise ValidationError(_('Incorrect payment acquirer provider'))
data = values['Data']
        # Test key provided by Worldline
key = u'002001000000001_KEY1'
if self.environment == 'prod':
key = getattr(self, 'sips_secret')
shasign = sha256(data + key)
return shasign.hexdigest()
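    # The Seal is simply sha256(Data + secret).hexdigest(); SIPS recomputes the
    # same digest on its side, so any tampering with Data invalidates the Seal.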
@api.multi
def sips_form_generate_values(self, values):
self.ensure_one()
base_url = self.env['ir.config_parameter'].sudo().get_param('web.base.url')
currency = self.env['res.currency'].sudo().browse(values['currency_id'])
currency_code = CURRENCY_CODES.get(currency.name, False)
if not currency_code:
            raise ValidationError(_('Currency not supported by Worldline'))
amount = int(values['amount'] * 100)
if self.environment == 'prod':
# For production environment, key version 2 is required
merchant_id = getattr(self, 'sips_merchant_id')
key_version = '2'
else:
            # Test key provided by Atos Worldline works only with version 1
merchant_id = '002001000000001'
key_version = '1'
sips_tx_values = dict(values)
sips_tx_values.update({
'Data': u'amount=%s|' % amount +
u'currencyCode=%s|' % currency_code +
u'merchantId=%s|' % merchant_id +
u'normalReturnUrl=%s|' % urlparse.urljoin(base_url, SipsController._return_url) +
u'automaticResponseUrl=%s|' % urlparse.urljoin(base_url, SipsController._return_url) +
u'transactionReference=%s|' % values['reference'] +
u'statementReference=%s|' % values['reference'] +
u'keyVersion=%s' % key_version,
'InterfaceVersion': 'HP_2.3',
})
return_context = {}
if sips_tx_values.get('return_url'):
return_context[u'return_url'] = u'%s' % sips_tx_values.pop('return_url')
return_context[u'reference'] = u'%s' % sips_tx_values['reference']
sips_tx_values['Data'] += u'|returnContext=%s' % (json.dumps(return_context))
shasign = self._sips_generate_shasign(sips_tx_values)
sips_tx_values['Seal'] = shasign
return sips_tx_values
@api.multi
def sips_get_form_action_url(self):
self.ensure_one()
return self._get_sips_urls(self.environment)['sips_form_url']
class TxSips(models.Model):
_inherit = 'payment.transaction'
_sips_valid_tx_status = ['00']
_sips_wait_tx_status = ['90', '99']
_sips_refused_tx_status = ['05', '14', '34', '54', '75', '97']
_sips_error_tx_status = ['03', '12', '24', '25', '30', '40', '51', '63', '94']
_sips_pending_tx_status = ['60']
_sips_cancel_tx_status = ['17']
# --------------------------------------------------
# FORM RELATED METHODS
# --------------------------------------------------
def _sips_data_to_object(self, data):
res = {}
        for element in data.split('|'):
            # split on the first '=' only: values (URLs, returnContext JSON) may themselves contain '='
            key, value = element.split('=', 1)
            res[key] = value
return res
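    # Example round-trip (values illustrative, mirroring the Data string built
    # in sips_form_generate_values):
    #   'amount=1000|currencyCode=978|transactionReference=SO042'
    #   -> {'amount': '1000', 'currencyCode': '978', 'transactionReference': 'SO042'}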
@api.model
def _sips_form_get_tx_from_data(self, data):
""" Given a data dict coming from sips, verify it and find the related
transaction record. """
data = self._sips_data_to_object(data.get('Data'))
reference = data.get('transactionReference')
if not reference:
custom = json.loads(data.pop('returnContext', False) or '{}')
reference = custom.get('reference')
payment_tx = self.search([('reference', '=', reference)])
if not payment_tx or len(payment_tx) > 1:
error_msg = _('Sips: received data for reference %s') % reference
if not payment_tx:
error_msg += _('; no order found')
else:
                error_msg += _('; multiple orders found')
_logger.error(error_msg)
raise ValidationError(error_msg)
return payment_tx
@api.multi
def _sips_form_get_invalid_parameters(self, data):
invalid_parameters = []
data = self._sips_data_to_object(data.get('Data'))
# TODO: txn_id: should be false at draft, set afterwards, and verified with txn details
if self.acquirer_reference and data.get('transactionReference') != self.acquirer_reference:
invalid_parameters.append(('transactionReference', data.get('transactionReference'), self.acquirer_reference))
# check what is bought
if float_compare(float(data.get('amount', '0.0')) / 100, self.amount, 2) != 0:
invalid_parameters.append(('amount', data.get('amount'), '%.2f' % self.amount))
if self.partner_reference and data.get('customerId') != self.partner_reference:
invalid_parameters.append(('customerId', data.get('customerId'), self.partner_reference))
return invalid_parameters
@api.multi
def _sips_form_validate(self, data):
data = self._sips_data_to_object(data.get('Data'))
status = data.get('responseCode')
data = {
'acquirer_reference': data.get('transactionReference'),
'partner_reference': data.get('customerId'),
'date_validate': data.get('transactionDateTime',
fields.Datetime.now())
}
res = False
if status in self._sips_valid_tx_status:
msg = 'Payment for tx ref: %s, got response [%s], set as done.' % \
(self.reference, status)
_logger.info(msg)
data.update(state='done', state_message=msg)
res = True
elif status in self._sips_error_tx_status:
msg = 'Payment for tx ref: %s, got response [%s], set as ' \
'error.' % (self.reference, status)
data.update(state='error', state_message=msg)
elif status in self._sips_wait_tx_status:
msg = 'Received wait status for payment ref: %s, got response ' \
'[%s], set as error.' % (self.reference, status)
data.update(state='error', state_message=msg)
elif status in self._sips_refused_tx_status:
msg = 'Received refused status for payment ref: %s, got response' \
' [%s], set as error.' % (self.reference, status)
data.update(state='error', state_message=msg)
elif status in self._sips_pending_tx_status:
msg = 'Payment ref: %s, got response [%s] set as pending.' \
% (self.reference, status)
data.update(state='pending', state_message=msg)
elif status in self._sips_cancel_tx_status:
msg = 'Received notification for payment ref: %s, got response ' \
'[%s], set as cancel.' % (self.reference, status)
data.update(state='cancel', state_message=msg)
else:
msg = 'Received unrecognized status for payment ref: %s, got ' \
'response [%s], set as error.' % (self.reference, status)
data.update(state='error', state_message=msg)
_logger.info(msg)
self.write(data)
return res |
class AcquirerSips(models.Model): |
convert_data_type.py | # Copyright (C) 2018-2021 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
import logging as log
import numpy as np
from openvino.tools.mo.front.extractor import get_new_placeholder_name
from openvino.tools.mo.graph.graph import Node, Graph
from openvino.tools.mo.utils.error import Error
from openvino.tools.mo.utils.utils import refer_to_faq_msg
"""
Packed data of custom types are stored in numpy uint8 data type.
To distinguish true uint8 and custom data we introduce this class not to store,
but to have unique data type in SUPPORTED_DATA_TYPES map
"""
class packed_U1(np.generic):
pass
class packed_U4(np.generic):
pass
class packed_I4(np.generic):
pass
SUPPORTED_DATA_TYPES = {
'float': (np.float32, 'FP32', 'f32'),
'half': (np.float16, 'FP16', 'f16'),
'FP32': (np.float32, 'FP32', 'f32'),
'FP64': (np.float64, 'FP64', 'f64'),
'FP16': (np.float16, 'FP16', 'f16'),
'I32': (np.int32, 'I32', 'i32'),
'I64': (np.int64, 'I64', 'i64'),
'int8': (np.int8, 'I8', 'i8'),
'int32': (np.int32, 'I32', 'i32'),
'int64': (np.int64, 'I64', 'i64'),
'bool': (np.bool, 'BOOL', 'boolean'),
'uint8': (np.uint8, 'U8', 'u8'),
'uint32': (np.uint32, 'U32', 'u32'),
'uint64': (np.uint64, 'U64', 'u64'),
# custom types
'U1': (packed_U1, 'U1', 'u1'),
'int4': (packed_I4, 'I4', 'i4'),
'uint4': (packed_U4, 'U4', 'u4'),
'I4': (packed_I4, 'I4', 'i4'),
'U4': (packed_U4, 'U4', 'u4'),
}
def data_type_str_to_np(data_type_str: str):
return SUPPORTED_DATA_TYPES[data_type_str][0] if data_type_str in SUPPORTED_DATA_TYPES else None
def data_type_str_to_precision(data_type_str: str):
return SUPPORTED_DATA_TYPES[data_type_str][1] if data_type_str in SUPPORTED_DATA_TYPES else None
def data_type_str_to_destination_type(data_type_str: str):
return SUPPORTED_DATA_TYPES[data_type_str][2] if data_type_str in SUPPORTED_DATA_TYPES else None
def np_data_type_to_precision(np_data_type):
for np_t, precision, _ in SUPPORTED_DATA_TYPES.values():
if np_t == np_data_type:
return precision
raise Error('Data type "{}" is not supported'.format(np_data_type))
def np_data_type_to_destination_type(np_data_type):
for np_t, _, destination_type in SUPPORTED_DATA_TYPES.values():
if np_t == np_data_type:
return destination_type
raise Error('Data type "{}" is not supported'.format(np_data_type))
def destination_type_to_np_data_type(dst_type):
for np_t, _, destination_type in SUPPORTED_DATA_TYPES.values():
if destination_type == dst_type:
return np_t
raise Error('Destination type "{}" is not supported'.format(dst_type))
def precision_to_destination_type(data_type_str):
for _, precision, destination_type in SUPPORTED_DATA_TYPES.values():
if precision == data_type_str:
return destination_type
raise Error('Data type "{}" is not supported'.format(data_type_str))
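# Example lookups (read straight from SUPPORTED_DATA_TYPES above):
#   data_type_str_to_np('FP16')              -> np.float16
#   np_data_type_to_precision(np.float16)    -> 'FP16'
#   precision_to_destination_type('FP16')    -> 'f16'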
def convert_blob(blob: np.ndarray, dst_type: type):
if blob.dtype == dst_type:
return blob, None, None
converted_blob = blob.astype(dtype=dst_type, casting="unsafe")
if dst_type in (np.int32, np.int64, np.uint8, np.int8) and not np.array_equal(blob, converted_blob):
raise Error('The conversion of blob with value "{}" to dst_type "{}" results in rounding'.format(
blob, dst_type))
finite_match = (np.isfinite(blob) != np.isfinite(converted_blob))
zero_match = ((blob == 0) != (converted_blob == 0))
finite_match_count = np.count_nonzero(finite_match)
zero_match_count = np.count_nonzero(zero_match)
return converted_blob, finite_match_count, zero_match_count
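# Illustrative example (values assumed): a float64 -> float16 conversion that
# overflows is reported through the match counts rather than raising:
#   blob = np.array([1e40, 0.5, 0.0], dtype=np.float64)
#   converted, n_finite, n_zero = convert_blob(blob, np.float16)
#   # 1e40 overflows to inf -> n_finite == 1; no value collapsed to zero -> n_zero == 0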
def convert_node_blobs(graph: Graph, node: Node, data_type: type):
out_edges = graph.out_edges(node.node, data=True)
# if the data.value is used as binary weights
if any('bin' in d for _, __, d in out_edges):
blob = node.value
if blob.dtype != data_type:
new_blob, finite_match_count, zero_match_count = convert_blob(blob, data_type)
consumers = [x.name if x.has_valid('name') else '<NO NAME>' for x in node.out_nodes()]
log.debug(
'Blob was converted to {} while dumping to the bin file. This blob is an input for {} nodes.'.format(
data_type, consumers))
if finite_match_count:
log.error(
("{} elements of {} were clipped to infinity while converting a blob for node [{}] to {}. " +
refer_to_faq_msg(76)).format(finite_match_count, blob.size, consumers, data_type))
if zero_match_count:
log.warning(
("{} elements of {} were clipped to zero while converting a blob for node [{}] to {}. " +
refer_to_faq_msg(77)).format(zero_match_count, blob.size, consumers, data_type))
node.value = new_blob
# for the constant node need to propagate the converted value to the node output because there is a fake
# input data for the 'Const' nodes being generated in the CreateConstNodesReplacement
if len(node.out_nodes()) == 1 and node.out_node(0).op == 'Const':
const_node = node.out_node(0)
const_node.value = new_blob
const_node.infer(const_node)
const_node.type_infer(const_node)
def convert_parameters_data_type(graph: Graph, data_type_str: str):
inputs = graph.get_op_nodes(op='Parameter')
data_type = data_type_str_to_np(data_type_str)
user_defined_data_types = graph.graph['user_shapes'] if 'user_shapes' in graph.graph else None
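    # Each user_shapes entry is assumed (from the accesses below) to be a list
    # of descriptors such as {'port': None, 'data_type': np.float16} for the
    # whole Parameter, or {'in': 0, 'data_type': np.int32} for a single port.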
for input in inputs:
user_defined_type = None
name = input.soft_get('initial_node_name', input.id)
# override data type for Parameter specified by the user. This is a workaround for the issue in the
# extensions.middle.ChangePlaceholderTypes transformation which has an incorrect condition and always overrides
# Parameter data type to np.float32. When the transformation is fixed the code below must be updated
if user_defined_data_types is not None and name in user_defined_data_types:
for desc in user_defined_data_types[name]:
if 'port' in desc and desc['port'] is None: # neither input nor output port specified
user_defined_type = desc.get('data_type', None)
else: # need to check the particular port the Parameter was created for
p_name = get_new_placeholder_name(name, 'out' in desc, desc['out'] if 'out' in desc else desc['in'])
if p_name == input.soft_get('name'):
user_defined_type = desc.get('data_type', None)
if user_defined_type is not None:
log.info('Overriding Parameter node {} data type to {}'.format(name, user_defined_type))
input['data_type'] = user_defined_type
input.out_port(0).set_data_type(user_defined_type, True)
elif not input.has_valid('data_type') or input.data_type == np.float32:
input['data_type'] = data_type
input.out_port(0).set_data_type(data_type, True)
else:
log.info('Do not change data type for node {}'.format(input.soft_get('name')))
def | (graph: Graph, data_type_str: str):
for node in graph.get_data_nodes():
if node.value is not None:
try:
if node.value.dtype in [np.float32, np.float64, np.float16] and not node.has_and_set('correct_data_type'):
convert_node_blobs(graph, node, data_type_str_to_np(data_type_str))
except Exception as e:
                raise Error('Couldn\'t convert blob {}, details: {}', node.soft_get('name'), e) from e
| convert_blobs |
0002_alter_family_id_grupo.py | # Generated by Django 3.2.2 on 2021-05-28 06:35
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [ | ]
operations = [
migrations.AlterField(
model_name='family',
name='id_grupo',
field=models.IntegerField(primary_key=True, serialize=False),
),
] | ('family', '0001_initial'), |
views.py | from django.shortcuts import render, redirect
from django.views.generic import DetailView, CreateView, TemplateView
from django.contrib.auth.mixins import LoginRequiredMixin
from .models import Project
from apps.bug.models import Bug, Comment
# Create your views here.
class ProjectDetail(DetailView):
model = Project
class ProjectCreate(LoginRequiredMixin,CreateView):
model = Project
fields = ['name', 'short_description']
login_url = '/login/'
def form_valid(self, form):
form.instance.owner = self.request.user
return super().form_valid(form)
class ReportBug(CreateView):
model = Bug
template_name = "project/report_bug.html"
fields = [
'title',
'classification',
'description'
]
def get_context_data(self, **kwargs):
self.user = self.request.user
self.project = Project.objects.get(pk = self.kwargs['pk'])
ctx = super(ReportBug, self).get_context_data(**kwargs)
ctx['user'] = self.user
ctx['project'] = self.project
return ctx
def form_valid(self, form):
form.instance.project = Project.objects.get(pk = self.kwargs['pk'])
form.instance.reporter = self.request.user
form.instance.bug_number = form.instance.project.get_bug_num()
return super().form_valid(form)
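    # i.e. the view, not the form, supplies the project, the reporter and a
    # per-project sequence number (via Project.get_bug_num) before CreateView
    # saves the Bug.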
class BugThread(TemplateView):
| template_name = 'bug/bug_detail.html'
def post(self, request, project, bug_number):
bug = Bug.objects.get(project__pk = project, bug_number = bug_number)
comm = Comment(
bug = bug,
commenter = request.user,
comment = request.POST['comment']
)
        if 'close_thread' in request.POST:
close_thread = request.POST['close_thread']
print(close_thread)
if close_thread:
bug.close_bug()
print(f'The bug {bug} is now closed.')
comm.save()
return redirect('bug-thread', project, bug_number)
def get_context_data(self, **kwargs):
bug = Bug.objects.get(project__pk = self.kwargs['project'], bug_number = self.kwargs['bug_number'])
ctx = {
'bug': bug
}
return ctx |
|
cryojet.py | from enum import Enum
from gi.repository import GLib
from zope.interface import implementer
import mxdc.devices.shutter
from mxdc import Device, Signal, Property
from mxdc.devices import misc
from mxdc.utils.log import get_module_logger
from .interfaces import ICryostat
logger = get_module_logger(__name__)
class CryoJetNozzle(mxdc.devices.shutter.EPICSShutter):
"""
A specialized in-out actuator for pneumatic Cryojet nozzles.
:param name: The process variable name of the devices
"""
def __init__(self, name):
open_name = "%s:opr:open" % name
close_name = "%s:opr:close" % name
state_name = "%s:out" % name
mxdc.devices.shutter.EPICSShutter.__init__(self, open_name, close_name, state_name)
self._messages = ['Restoring', 'Retracting']
self._name = 'Cryojet Nozzle'
@implementer(ICryostat)
class CryostatBase(Device):
"""
Base class for all cryostat devices. A cryostat maintains low temperatures at the sample position.
Signals:
- temp (float,): Sample temperature
- level (float,): Cryogen level
- sample (float,): Cryogen flow-rate
- shield (float,): Shield flow-rate
"""
class Positions(Enum):
IN, OUT = range(2)
class Signals:
temp = Signal('temp', arg_types=(float,))
level = Signal('level', arg_types=(float,))
sample = Signal('sample', arg_types=(float,))
shield = Signal('shield', arg_types=(float,))
pos = Signal('position', arg_types=(object,))
# Properties
temperature = Property(type=float, default=0.0)
shield = Property(type=float, default=0.0)
sample = Property(type=float, default=0.0)
level = Property(type=float, default=0.0)
def configure(self, temp=None, sample=None, shield=None, position=None):
"""
Configure the Cryostat.
:param temp: Set the target sample temperature
:param sample: Set the sample flow rate
:param shield: Set the shield flow rate
:param position: If the cryostat set the position. Should be one of Positions.IN, Positions.OUT
"""
def stop(self):
"""
Stop the cryostat
"""
def start(self):
"""
Start the cryostat
"""
@implementer(ICryostat)
class CryoJetBase(Device):
"""
Cryogenic Nozzle Jet Device
"""
temperature = Property(type=float, default=0.0)
shield = Property(type=float, default=0.0)
sample = Property(type=float, default=0.0)
level = Property(type=float, default=0.0)
def __init__(self, *args, **kwargs):
super().__init__()
self.name = 'Cryojet'
self._previous_flow = 7.0
self.setup(*args, **kwargs)
def setup(self, *args, **kwargs):
pass
def anneal(self, duration):
"""
Anneal for the specified duration
:param duration: duration in seconds to stop cooling
"""
pass
def on_temp(self, obj, val):
if val < 110:
self.set_state(health=(0, 'temp', ''))
elif val < 115:
self.set_state(health=(2, 'temp', 'Temp. high!'))
else:
self.set_state(health=(4, 'temp', 'Temp. too high!'))
self.set_property('temperature', val)
def on_sample(self, obj, val):
if val > 5:
self.set_state(health=(0, 'sample', ''))
elif val > 4:
self.set_state(health=(2, 'sample', 'Sample Flow Low!'))
else:
self.set_state(health=(4, 'sample','Sample Flow Too Low!'))
self.set_property('sample', val)
def on_shield(self, obj, val):
if val > 5:
self.set_state(health=(0, 'shield', ''))
elif val > 4:
self.set_state(health=(2, 'shield','Shield Flow Low!'))
else:
self.set_state(health=(4, 'shield','Shield Flow Too Low!'))
self.set_property('shield', val)
def on_level(self, obj, val):
|
def on_nozzle(self, obj, val):
if val:
self.set_state(health=(1, 'nozzle', 'Retracted!'))
else:
self.set_state(health=(0, 'nozzle', 'Restored'))
class CryoJet(CryoJetBase):
def setup(self, name, level_name, nozzle_name):
self.temp_fbk = self.add_pv('{}:sensorTemp:get'.format(name))
self.sample_fbk = self.add_pv('{}:SampleFlow:get'.format(name))
self.shield_fbk = self.add_pv('{}:ShieldFlow:get'.format(name))
self.sample_sp = self.add_pv('{}:sampleFlow:set'.format(name))
self.level_fbk = self.add_pv('{}:ch1LVL:get'.format(level_name))
self.fill_status = self.add_pv('{}:status:ch1:N.SVAL'.format(level_name))
self.nozzle = CryoJetNozzle(nozzle_name)
# connect signals for monitoring state
self.temp_fbk.connect('changed', self.on_temp)
self.level_fbk.connect('changed', self.on_level)
self.sample_fbk.connect('changed', self.on_sample)
        self.shield_fbk.connect('changed', self.on_shield)
self.nozzle.connect('changed', self.on_nozzle)
def on_level(self, obj, val):
if val < 150:
self.set_state(health=(4, 'cryo','Cryogen too low!'))
elif val < 200:
self.set_state(health=(2, 'cryo','Cryogen low!'))
else:
self.set_state(health=(0, 'cryo', ''))
self.set_property('level', val/10.)
def anneal(self, duration):
previous_flow = self.sample_fbk.get()
self.sample_sp.put(0.0)
GLib.timeout_add(duration*1000, self.sample_sp.put, previous_flow)
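        # i.e. cut sample flow to zero for `duration` seconds, then restore the
        # previous flow from a GLib timer so the call itself never blocks.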
class CryoJet5(CryoJetBase):
def setup(self, name, nozzle_name):
self.temp_fbk = self.add_pv('{}:sample:temp:fbk'.format(name))
self.sample_fbk = self.add_pv('{}:sample:flow:fbk'.format(name))
self.shield_fbk = self.add_pv('{}:shield:flow:fbk'.format(name))
self.sample_sp = self.add_pv('{}:sample:flow'.format(name))
self.level_fbk = self.add_pv('{}:autofill:level:fbk'.format(name))
self.fill_status = self.add_pv('{}:autofill:state'.format(name))
self.nozzle = CryoJetNozzle(nozzle_name)
# connect signals for monitoring state
self.temp_fbk.connect('changed', self.on_temp)
self.level_fbk.connect('changed', self.on_level)
self.sample_fbk.connect('changed', self.on_sample)
self.shield_fbk.connect('changed', self.on_shield)
self.nozzle.connect('changed', self.on_nozzle)
class SimCryoJet(CryoJetBase):
def setup(self, *args, **kwargs):
self.nozzle = mxdc.devices.shutter.SimShutter('Sim Cryo Nozzle')
self.temp_fbk = misc.SimPositioner('Cryo Temperature', pos=102.5, noise=3)
self.sample_fbk = misc.SimPositioner('Cryo Sample flow', pos=6.5, noise=1)
self.shield_fbk = misc.SimPositioner('Cryo Shield flow', pos=9.5, noise=1)
self.level_fbk = misc.SimPositioner('Cryo Level', pos=35.5, noise=10)
self.name = 'Sim CryoJet'
# connect signals for monitoring state
self.temp_fbk.connect('changed', self.on_temp)
self.level_fbk.connect('changed', self.on_level)
self.sample_fbk.connect('changed', self.on_sample)
self.shield_fbk.connect('changed', self.on_shield)
self.nozzle.connect('changed', self.on_nozzle)
def _simulate_nozzle(self, *args, **kwargs):
if self.nozzle.is_open():
self.nozzle.close()
else:
self.nozzle.open()
return True
def anneal(self, duration):
previous_flow = self.sample_fbk.get()
        self.sample_fbk.put(0.0)  # SimCryoJet defines no sample_sp; zero the simulated flow directly
GLib.timeout_add(duration*1000, self.sample_fbk.put, previous_flow)
__all__ = ['CryoJet', 'CryoJet5', 'SimCryoJet']
| if val < 15:
self.set_state(health=(4, 'cryo','Cryogen too low!'))
elif val < 20:
self.set_state(health=(2, 'cryo','Cryogen low!'))
else:
self.set_state(health=(0, 'cryo', ''))
self.set_property('level', val) |
styles.ts | import styled from "styled-components";
export const Container = styled.div`
display: flex;
align-items: center;
justify-content: center; | div{
width: 300px;
background-color: var(--shape);
padding:1.5rem 2rem;
border-radius: 0.25rem;
color: var(--text-title);
header{
display: flex;
align-items: center;
justify-content: space-between;
}
strong{
display: block;
margin-top: 1rem;
font-size:2rem;
font-weight: 500;
line-height:3rem;
}
&.highlight-background{
background-color: var(--orange);
color: var(--shape);
}
}
`; | flex-wrap: wrap;
gap: 2rem;
margin-top: -10rem;
|
pkg_config_test.py | import unittest
from conans.client.conf import get_default_settings_yml
from conans.client.generators.pkg_config import PkgConfigGenerator
from conans.model.build_info import CppInfo
from conans.model.conan_file import ConanFile
from conans.model.env_info import EnvValues
from conans.model.ref import ConanFileReference
from conans.model.settings import Settings
from conans.test.utils.mocks import TestBufferConanOutput
class PkgGeneratorTest(unittest.TestCase):
def variables_setup_test(self):
|
def pkg_config_custom_names_test(self):
conanfile = ConanFile(TestBufferConanOutput(), None)
conanfile.initialize(Settings({}), EnvValues())
ref = ConanFileReference.loads("MyPkg/0.1@lasote/stables")
cpp_info = CppInfo(ref.name, "dummy_root_folder1")
cpp_info.name = "my_pkg"
cpp_info.names["pkg_config"] = "my_pkg_custom_name"
cpp_info.defines = ["MYDEFINE1"]
cpp_info.cflags.append("-Flag1=23")
cpp_info.version = "1.3"
cpp_info.description = "My cool description"
conanfile.deps_cpp_info.add(ref.name, cpp_info)
ref = ConanFileReference.loads("MyPkg1/0.1@lasote/stables")
cpp_info = CppInfo(ref.name, "dummy_root_folder1")
cpp_info.name = "MYPKG1"
cpp_info.names["pkg_config"] = "my_pkg1_custom_name"
cpp_info.defines = ["MYDEFINE11"]
cpp_info.cflags.append("-Flag1=21")
cpp_info.version = "1.7"
cpp_info.description = "My other cool description"
conanfile.deps_cpp_info.add(ref.name, cpp_info)
ref = ConanFileReference.loads("MyPkg2/0.1@lasote/stables")
cpp_info = CppInfo(ref.name, "dummy_root_folder2")
cpp_info.names["pkg_config"] = "my_pkg2_custom_name"
cpp_info.defines = ["MYDEFINE2"]
cpp_info.version = "2.3"
cpp_info.exelinkflags = ["-exelinkflag"]
cpp_info.sharedlinkflags = ["-sharedlinkflag"]
cpp_info.cxxflags = ["-cxxflag"]
cpp_info.public_deps = ["MyPkg", "MyPkg1"]
conanfile.deps_cpp_info.add(ref.name, cpp_info)
ref = ConanFileReference.loads("zlib/1.2.11@lasote/stable")
cpp_info = CppInfo(ref.name, "dummy_root_folder_zlib")
cpp_info.name = "ZLIB"
cpp_info.defines = ["MYZLIBDEFINE2"]
cpp_info.version = "1.2.11"
conanfile.deps_cpp_info.add(ref.name, cpp_info)
ref = ConanFileReference.loads("bzip2/0.1@lasote/stables")
cpp_info = CppInfo(ref.name, "dummy_root_folder2")
cpp_info.name = "BZip2"
cpp_info.names["pkg_config"] = "BZip2"
cpp_info.defines = ["MYDEFINE2"]
cpp_info.version = "2.3"
cpp_info.exelinkflags = ["-exelinkflag"]
cpp_info.sharedlinkflags = ["-sharedlinkflag"]
cpp_info.cxxflags = ["-cxxflag"]
cpp_info.public_deps = ["MyPkg", "MyPkg1", "zlib"]
conanfile.deps_cpp_info.add(ref.name, cpp_info)
generator = PkgConfigGenerator(conanfile)
files = generator.content
self.assertEqual(files["my_pkg2_custom_name.pc"], """prefix=dummy_root_folder2
libdir=${prefix}/lib
includedir=${prefix}/include
Name: my_pkg2_custom_name
Description: Conan package: my_pkg2_custom_name
Version: 2.3
Libs: -L${libdir} -sharedlinkflag -exelinkflag
Cflags: -I${includedir} -cxxflag -DMYDEFINE2
Requires: my_pkg_custom_name my_pkg1_custom_name
""")
self.assertEqual(files["my_pkg1_custom_name.pc"], """prefix=dummy_root_folder1
libdir=${prefix}/lib
includedir=${prefix}/include
Name: my_pkg1_custom_name
Description: My other cool description
Version: 1.7
Libs: -L${libdir}
Cflags: -I${includedir} -Flag1=21 -DMYDEFINE11
""")
self.assertEqual(files["my_pkg_custom_name.pc"], """prefix=dummy_root_folder1
libdir=${prefix}/lib
includedir=${prefix}/include
Name: my_pkg_custom_name
Description: My cool description
Version: 1.3
Libs: -L${libdir}
Cflags: -I${includedir} -Flag1=23 -DMYDEFINE1
""")
self.assertEqual(files["BZip2.pc"], """prefix=dummy_root_folder2
libdir=${prefix}/lib
includedir=${prefix}/include
Name: BZip2
Description: Conan package: BZip2
Version: 2.3
Libs: -L${libdir} -sharedlinkflag -exelinkflag
Cflags: -I${includedir} -cxxflag -DMYDEFINE2
Requires: my_pkg_custom_name my_pkg1_custom_name zlib
""")
def apple_frameworks_test(self):
settings = Settings.loads(get_default_settings_yml())
settings.compiler = "apple-clang"
settings.os = "Macos"
conanfile = ConanFile(TestBufferConanOutput(), None)
conanfile.initialize(Settings({}), EnvValues())
conanfile.settings = settings
ref = ConanFileReference.loads("MyPkg/0.1@lasote/stables")
cpp_info = CppInfo(ref.name, "dummy_root_folder1")
cpp_info.frameworks = ['AudioUnit', 'AudioToolbox']
cpp_info.version = "1.3"
cpp_info.description = "My cool description"
conanfile.deps_cpp_info.add(ref.name, cpp_info)
generator = PkgConfigGenerator(conanfile)
files = generator.content
self.assertEqual(files["MyPkg.pc"], """prefix=dummy_root_folder1
libdir=${prefix}/lib
includedir=${prefix}/include
Name: MyPkg
Description: My cool description
Version: 1.3
Libs: -L${libdir} -Wl,-rpath,"${libdir}" -framework AudioUnit -framework AudioToolbox
Cflags: -I${includedir}
""")
| conanfile = ConanFile(TestBufferConanOutput(), None)
conanfile.initialize(Settings({}), EnvValues())
ref = ConanFileReference.loads("MyPkg/0.1@lasote/stables")
cpp_info = CppInfo(ref.name, "dummy_root_folder1")
cpp_info.name = "my_pkg"
cpp_info.defines = ["MYDEFINE1"]
cpp_info.cflags.append("-Flag1=23")
cpp_info.version = "1.3"
cpp_info.description = "My cool description"
conanfile.deps_cpp_info.add(ref.name, cpp_info)
ref = ConanFileReference.loads("MyPkg1/0.1@lasote/stables")
cpp_info = CppInfo(ref.name, "dummy_root_folder1")
cpp_info.name = "MYPKG1"
cpp_info.defines = ["MYDEFINE11"]
cpp_info.cflags.append("-Flag1=21")
cpp_info.version = "1.7"
cpp_info.description = "My other cool description"
cpp_info.public_deps = ["MyPkg"]
conanfile.deps_cpp_info.add(ref.name, cpp_info)
ref = ConanFileReference.loads("MyPkg2/0.1@lasote/stables")
cpp_info = CppInfo(ref.name, "dummy_root_folder2")
cpp_info.defines = ["MYDEFINE2"]
cpp_info.version = "2.3"
cpp_info.exelinkflags = ["-exelinkflag"]
cpp_info.sharedlinkflags = ["-sharedlinkflag"]
cpp_info.cxxflags = ["-cxxflag"]
cpp_info.public_deps = ["MyPkg"]
conanfile.deps_cpp_info.add(ref.name, cpp_info)
generator = PkgConfigGenerator(conanfile)
files = generator.content
self.assertEqual(files["MyPkg2.pc"], """prefix=dummy_root_folder2
libdir=${prefix}/lib
includedir=${prefix}/include
Name: MyPkg2
Description: Conan package: MyPkg2
Version: 2.3
Libs: -L${libdir} -sharedlinkflag -exelinkflag
Cflags: -I${includedir} -cxxflag -DMYDEFINE2
Requires: my_pkg
""")
self.assertEqual(files["mypkg1.pc"], """prefix=dummy_root_folder1
libdir=${prefix}/lib
includedir=${prefix}/include
Name: mypkg1
Description: My other cool description
Version: 1.7
Libs: -L${libdir}
Cflags: -I${includedir} -Flag1=21 -DMYDEFINE11
Requires: my_pkg
""")
self.assertEqual(files["my_pkg.pc"], """prefix=dummy_root_folder1
libdir=${prefix}/lib
includedir=${prefix}/include
Name: my_pkg
Description: My cool description
Version: 1.3
Libs: -L${libdir}
Cflags: -I${includedir} -Flag1=23 -DMYDEFINE1
""") |
coupled_dfs_stack.rs | use super::PointerPair;
use crate::v2::NodeId;
/// **(internal)** A 32-bit version of the `Stack` used in the general `u48` algorithm.
/// Method documentation omitted when equivalent to the one on `u48` version.
///
/// The main difference is that the `left` and `right` pointers are only 32-bit, so we can
/// fit both of them into a single `u64`. There is one small caveat though: The `left` pointer
/// (which is expected to have a larger range) can go up to `2^32 - 1`, but the `right`
/// pointer can only go up to `2^31 - 1`. This is because we need to somehow differentiate
/// the results from tasks, and results can still, in the worst case, extend
/// to 48 bits. So we keep the top-most bit of the stack entry reserved as a flag whether | items: Vec<PointerPair>,
}
impl Stack {
pub fn new(variable_count: u16) -> Stack {
let variable_count = usize::from(variable_count);
let mut stack = Stack {
index_after_last: 1,
items: vec![PointerPair(0); 2 * variable_count + 2],
};
stack.items[0] = PointerPair::from(PointerPair::RESULT_MASK);
stack
}
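    // Index 0 holds a dummy "result" sentinel (RESULT_MASK set): it lets
    // save_result_unchecked read entry[-2] on a nearly empty stack without a
    // bounds check (see the comment inside save_result_unchecked).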
#[inline]
pub fn has_last_entry(&self) -> bool {
self.index_after_last == 2
}
#[inline]
pub unsafe fn push_task_unchecked(&mut self, tasks: PointerPair) {
debug_assert!(self.index_after_last < self.items.len());
let entry = unsafe { self.items.get_unchecked_mut(self.index_after_last) };
*entry = tasks;
self.index_after_last += 1;
}
#[inline]
pub fn has_result(&self) -> bool {
debug_assert!(self.index_after_last > 1);
unsafe {
self.items
.get_unchecked(self.index_after_last - 1)
.is_result()
}
}
#[inline]
pub unsafe fn pop_results_unchecked(&mut self) -> (NodeId, NodeId) {
debug_assert!(self.index_after_last > 2);
debug_assert!(self.items[self.index_after_last - 1].is_result());
debug_assert!(self.items[self.index_after_last - 2].is_result());
self.index_after_last -= 2;
let x = unsafe { self.items.get_unchecked(self.index_after_last) };
let y = unsafe { self.items.get_unchecked(self.index_after_last + 1) };
(x.into_result(), y.into_result())
}
#[inline]
pub unsafe fn peek_as_task_unchecked(&self) -> PointerPair {
debug_assert!(self.index_after_last > 1);
debug_assert!(!self.items[self.index_after_last - 1].is_result());
unsafe { *self.items.get_unchecked(self.index_after_last - 1) }
}
#[inline]
pub unsafe fn save_result_unchecked(&mut self, result: NodeId) -> bool {
debug_assert!(self.index_after_last >= 2);
debug_assert!(!self.items[self.index_after_last - 1].is_result());
// This operation is safe because we have that dummy first
// entry that gets accessed here if needed.
let before_top_index = self.index_after_last - 2;
let top_index = self.index_after_last - 1;
let before_top = unsafe { self.items.get_unchecked_mut(before_top_index) };
if before_top.is_result() {
// entry[-2] is also a result - just replace the top
unsafe {
*self.items.get_unchecked_mut(top_index) = PointerPair::from_result(result);
}
true
} else {
// entry[-2] is a task - swap it on top
let swap_on_top = *before_top;
*before_top = PointerPair::from_result(result);
unsafe {
*self.items.get_unchecked_mut(top_index) = swap_on_top;
}
false
}
}
} | /// the item is a result or not.
pub(super) struct Stack {
index_after_last: usize, |