file_name (string, length 3 to 137) | prefix (string, length 0 to 918k) | suffix (string, length 0 to 962k) | middle (string, length 0 to 812k) |
---|---|---|---|
index.tsx | import { Summary } from '../Summary';
import { TransactionsTable } from '../TransactionsTable';
import { Container } from './styles';
export function Dashboard() {
return (
<Container> | )
} | <Summary />
<TransactionsTable />
</Container> |
sample.rs | #[derive(Debug)]
struct HasLifetime<'a> {
some_string: &'a str
}
fn do_something<'a>(has_lifetime: &HasLifetime<'a>) {
println!("{}", has_lifetime.some_string);
}
fn main() {
let has_lifetime = HasLifetime { | };
do_something(&has_lifetime);
}
type TimeoutFunction = Box<Fn(&str, &ArgsMatcher) -> ()>;
fn whatever(&self, closure: Box<Fn() + 'static>) {
// do nothing
} | some_string: "example" |
abt-float-input.ts | import { customElement, bindable, bindingMode, containerless, DOM } from 'aurelia-framework';
export type FloatInputPlacement = 'sm' | 'md' | 'lg';
@containerless()
@customElement('abt-float-input')
export class BootstrapFloatInput {
@bindable({ defaultBindingMode: bindingMode.oneTime }) public id: string;
@bindable({ defaultBindingMode: bindingMode.oneWay }) public style: string;
@bindable({ defaultBindingMode: bindingMode.oneWay }) public class: string;
@bindable({ defaultBindingMode: bindingMode.oneWay }) public placeholder: string ;
@bindable({ defaultBindingMode: bindingMode.oneWay }) public placeholderFontSize: string ;
@bindable({ defaultBindingMode: bindingMode.oneWay }) public labelFontSize: string ;
@bindable({ defaultBindingMode: bindingMode.oneWay }) public placeholderOpacity: string ;
@bindable({ defaultBindingMode: bindingMode.oneWay }) public placeholderTop: string;
@bindable({ defaultBindingMode: bindingMode.oneWay }) public size: FloatInputPlacement = 'md';
@bindable({ defaultBindingMode: bindingMode.oneWay }) public type: string = 'text';
@bindable({ defaultBindingMode: bindingMode.oneWay }) public labelColor: string ;
@bindable({ defaultBindingMode: bindingMode.oneWay }) public placeholderColor: string ;
@bindable({ defaultBindingMode: bindingMode.oneWay }) public required: boolean | string = false;
@bindable({ defaultBindingMode: bindingMode.oneWay }) public readonly: boolean | string = false;
@bindable({ defaultBindingMode: bindingMode.twoWay }) public value: string;
private floatInput: HTMLInputElement;
private floatInputLabel: HTMLLabelElement;
private floatInputTemplate: Element;
private attached() {
if (this.value) {
this.floatInput.value = this.value;
}
let t = this.floatInputTemplate.hasAttribute('required');
let tt = this.floatInputTemplate.hasAttribute('readonly');
const isRequired = (this.required === '' && this.floatInputTemplate.hasAttribute('required')) || this.required.toString() === 'true';
const isReadOnly = (this.readonly === '' && this.floatInputTemplate.hasAttribute('readonly')) || this.readonly.toString() === 'true';
this.floatInput.required = isRequired;
this.floatInput.readOnly = isReadOnly;
| let top = '';
if (!this.floatInput.classList.contains('form-control')) {
this.floatInput.classList.add('form-control');
}
if (this.floatInput.classList.contains('form-control-sm')) {
this.size = 'sm';
}
if (this.floatInput.classList.contains('form-control-lg')) {
this.size = 'lg';
}
if (this.size === 'sm') {
this.floatInput.classList.add('form-control-sm');
fontSize = '90%';
top = '.5em';
} else if (this.size === 'lg' || this.floatInput.classList.contains('form-control-lg')) {
this.floatInput.classList.add('form-control-lg');
fontSize = '120%';
top = '.7em';
} else {
this.floatInput.classList.remove('form-control-sm');
this.floatInput.classList.remove('form-control-lg');
fontSize = '100%';
top = '.7em';
}
if (this.floatInput.classList.contains('form-control')) {
this.floatInputLabel.classList.add('has-float-label');
let style = `
#${id}.has-float-label .form-control:placeholder-shown:not(:focus) + * {
color : ${this.placeholderColor || 'black'} !important;
font-size: ${this.placeholderFontSize || fontSize} !important;
opacity: ${this.placeholderOpacity || '.5'} !important;
top: ${this.placeholderTop || top} !important;
}
#${id}.has-float-label label, #${id}.has-float-label > span
{
color : ${this.labelColor || 'black'} !important;
font-size: ${this.labelFontSize || '75%'} !important;
}`;
DOM.injectStyles(style, null, null, 's' + id);
}
}
} | let id = this.floatInputLabel.id;
let fontSize = ''; |
setup.py | import os
import re
from setuptools import setup, find_packages
try:
from pypandoc import convert
read_md = lambda f: convert(f, 'rst')
except ImportError:
print("warning: pypandoc module not found, could not convert Markdown to RST")
read_md = lambda f: open(f, 'r').read()
| def read_version():
# __PATH__ = os.path.abspath(os.path.dirname(__file__))
# with open(os.path.join(__PATH__, 'breakout_env/__init__.py')) as f:
# version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", f.read(), re.M)
# if version_match:
# return version_match.group(1)
# raise RuntimeError("Unable to find __version__ string")
return "1.0.5"
setup(
name='breakout_env',
packages=find_packages(include=['breakout_env*']),
version=read_version(),
description='A configurable Breakout environment for reinforcement learning',
long_description=read_md('README.md'),
author='SSARCandy',
author_email='[email protected]',
license='MIT',
url='https://github.com/SSARCandy/breakout-env',
keywords=['game', 'learning', 'environment'],
classifiers=[],
install_requires=['numpy>=1.1', 'distribute'],
include_package_data=True
) | |
theme.ts | import * as Db from "../db";
import { DbTheme } from "../db.types";
import { Theme } from "./theme.types";
export const getAllThemes = async () => {
return (await Db.getAllThemes()).map(toApiTheme);
};
export const getTheme = async (id: string) => {
const theme = await Db.getTheme(id);
if (!theme) {
throw new Error("Theme Not Found");
}
return toApiTheme(theme);
};
export const insertTheme = async (id: string, colors: unknown) => {
return toApiTheme(await Db.insertTheme(id, colors));
};
export const removeTheme = async (id: string) => {
await Db.removeTheme(id);
};
function | ({ path_id, filename, colors }: DbTheme): Theme {
return { id: path_id, filename, colors };
}
| toApiTheme |
sr-Cyrl-ME.js | /**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/ | (function (factory) {
if (typeof module === "object" && typeof module.exports === "object") {
var v = factory(null, exports);
if (v !== undefined) module.exports = v;
}
else if (typeof define === "function" && define.amd) {
define("@angular/common/locales/extra/sr-Cyrl-ME", ["require", "exports"], factory);
}
})(function (require, exports) {
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
// THIS CODE IS GENERATED - DO NOT MODIFY.
var u = undefined;
exports.default = [[["поноћ", "подне", "јутро", "по под.", "вече", "ноћ"], ["поноћ", "подне", "јутро", "по под.", "вече", "ноћу"], ["поноћ", "подне", "ујутро", "по подне", "увече", "ноћу"]], [["поноћ", "подне", "јутро", "поподне", "вече", "ноћ"], u, u], ["00:00", "12:00", ["06:00", "12:00"], ["12:00", "18:00"], ["18:00", "21:00"], ["21:00", "06:00"]]];
});
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoic3ItQ3lybC1NRS5qcyIsInNvdXJjZVJvb3QiOiIiLCJzb3VyY2VzIjpbIi4uLy4uLy4uLy4uLy4uLy4uLy4uL3BhY2thZ2VzL2NvbW1vbi9sb2NhbGVzL2V4dHJhL3NyLUN5cmwtTUUudHMiXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6IkFBQUE7Ozs7OztHQU1HOzs7Ozs7Ozs7Ozs7SUFFSCwwQ0FBMEM7SUFDMUMsSUFBTSxDQUFDLEdBQUcsU0FBUyxDQUFDO0lBRXBCLGtCQUFlLENBQUMsQ0FBQyxDQUFDLE9BQU8sRUFBQyxPQUFPLEVBQUMsT0FBTyxFQUFDLFNBQVMsRUFBQyxNQUFNLEVBQUMsS0FBSyxDQUFDLEVBQUMsQ0FBQyxPQUFPLEVBQUMsT0FBTyxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsTUFBTSxFQUFDLE1BQU0sQ0FBQyxFQUFDLENBQUMsT0FBTyxFQUFDLE9BQU8sRUFBQyxRQUFRLEVBQUMsVUFBVSxFQUFDLE9BQU8sRUFBQyxNQUFNLENBQUMsQ0FBQyxFQUFDLENBQUMsQ0FBQyxPQUFPLEVBQUMsT0FBTyxFQUFDLE9BQU8sRUFBQyxTQUFTLEVBQUMsTUFBTSxFQUFDLEtBQUssQ0FBQyxFQUFDLENBQUMsRUFBQyxDQUFDLENBQUMsRUFBQyxDQUFDLE9BQU8sRUFBQyxPQUFPLEVBQUMsQ0FBQyxPQUFPLEVBQUMsT0FBTyxDQUFDLEVBQUMsQ0FBQyxPQUFPLEVBQUMsT0FBTyxDQUFDLEVBQUMsQ0FBQyxPQUFPLEVBQUMsT0FBTyxDQUFDLEVBQUMsQ0FBQyxPQUFPLEVBQUMsT0FBTyxDQUFDLENBQUMsQ0FBQyxDQUFDIiwic291cmNlc0NvbnRlbnQiOlsiLyoqXG4gKiBAbGljZW5zZVxuICogQ29weXJpZ2h0IEdvb2dsZSBMTEMgQWxsIFJpZ2h0cyBSZXNlcnZlZC5cbiAqXG4gKiBVc2Ugb2YgdGhpcyBzb3VyY2UgY29kZSBpcyBnb3Zlcm5lZCBieSBhbiBNSVQtc3R5bGUgbGljZW5zZSB0aGF0IGNhbiBiZVxuICogZm91bmQgaW4gdGhlIExJQ0VOU0UgZmlsZSBhdCBodHRwczovL2FuZ3VsYXIuaW8vbGljZW5zZVxuICovXG5cbi8vIFRISVMgQ09ERSBJUyBHRU5FUkFURUQgLSBETyBOT1QgTU9ESUZZLlxuY29uc3QgdSA9IHVuZGVmaW5lZDtcblxuZXhwb3J0IGRlZmF1bHQgW1tbXCLQv9C+0L3QvtGbXCIsXCLQv9C+0LTQvdC1XCIsXCLRmNGD0YLRgNC+XCIsXCLQv9C+INC/0L7QtC5cIixcItCy0LXRh9C1XCIsXCLQvdC+0ZtcIl0sW1wi0L/QvtC90L7Rm1wiLFwi0L/QvtC00L3QtVwiLFwi0ZjRg9GC0YDQvlwiLFwi0L/QviDQv9C+0LQuXCIsXCLQstC10YfQtVwiLFwi0L3QvtGb0YNcIl0sW1wi0L/QvtC90L7Rm1wiLFwi0L/QvtC00L3QtVwiLFwi0YPRmNGD0YLRgNC+XCIsXCLQv9C+INC/0L7QtNC90LVcIixcItGD0LLQtdGH0LVcIixcItC90L7Rm9GDXCJdXSxbW1wi0L/QvtC90L7Rm1wiLFwi0L/QvtC00L3QtVwiLFwi0ZjRg9GC0YDQvlwiLFwi0L/QvtC/0L7QtNC90LVcIixcItCy0LXRh9C1XCIsXCLQvdC+0ZtcIl0sdSx1XSxbXCIwMDowMFwiLFwiMTI6MDBcIixbXCIwNjowMFwiLFwiMTI6MDBcIl0sW1wiMTI6MDBcIixcIjE4OjAwXCJdLFtcIjE4OjAwXCIsXCIyMTowMFwiXSxbXCIyMTowMFwiLFwiMDY6MDBcIl1dXTtcbiJdfQ== | |
workloadmeta_test.go | // Unless explicitly stated otherwise all files in this repository are licensed
// under the Apache License Version 2.0.
// This product includes software developed at Datadog (https://www.datadoghq.com/).
// Copyright 2016-present Datadog, Inc.
package collectors
import (
"fmt"
"testing"
"github.com/DataDog/datadog-agent/pkg/errors"
"github.com/DataDog/datadog-agent/pkg/util/kubernetes"
"github.com/DataDog/datadog-agent/pkg/workloadmeta"
"github.com/stretchr/testify/assert"
)
type store struct {
containers map[string]workloadmeta.Container
}
func (s *store) Subscribe(string, *workloadmeta.Filter) chan workloadmeta.EventBundle {
return nil
}
func (s *store) Unsubscribe(chan workloadmeta.EventBundle) {}
func (s *store) GetContainer(id string) (workloadmeta.Container, error) {
c, ok := s.containers[id]
if !ok {
return c, errors.NewNotFound(id)
}
return c, nil
}
func | (t *testing.T) {
const (
fullyFleshedContainerID = "foobarquux"
noEnvContainerID = "foobarbaz"
containerName = "agent"
podName = "datadog-agent-foobar"
podNamespace = "default"
env = "production"
svc = "datadog-agent"
version = "7.32.0"
)
standardTags := []string{
fmt.Sprintf("env:%s", env),
fmt.Sprintf("service:%s", svc),
fmt.Sprintf("version:%s", version),
}
podEntityID := workloadmeta.EntityID{
Kind: workloadmeta.KindKubernetesPod,
ID: "foobar",
}
podTaggerEntityID := fmt.Sprintf("kubernetes_pod_uid://%s", podEntityID.ID)
fullyFleshedContainerTaggerEntityID := fmt.Sprintf("container_id://%s", fullyFleshedContainerID)
noEnvContainerTaggerEntityID := fmt.Sprintf("container_id://%s", noEnvContainerID)
store := &store{
containers: map[string]workloadmeta.Container{
fullyFleshedContainerID: {
EntityID: workloadmeta.EntityID{
Kind: workloadmeta.KindContainer,
ID: fullyFleshedContainerID,
},
EntityMeta: workloadmeta.EntityMeta{
Name: containerName,
},
Image: workloadmeta.ContainerImage{
ID: "datadog/agent@sha256:a63d3f66fb2f69d955d4f2ca0b229385537a77872ffc04290acae65aed5317d2",
RawName: "datadog/agent@sha256:a63d3f66fb2f69d955d4f2ca0b229385537a77872ffc04290acae65aed5317d2",
Name: "datadog/agent",
ShortName: "agent",
Tag: "latest",
},
EnvVars: map[string]string{
"DD_ENV": env,
"DD_SERVICE": svc,
"DD_VERSION": version,
},
},
noEnvContainerID: {
EntityID: workloadmeta.EntityID{
Kind: workloadmeta.KindContainer,
ID: noEnvContainerID,
},
EntityMeta: workloadmeta.EntityMeta{
Name: containerName,
},
},
},
}
tests := []struct {
name string
labelsAsTags map[string]string
annotationsAsTags map[string]string
pod workloadmeta.KubernetesPod
expected []*TagInfo
}{
{
name: "fully formed pod (no containers)",
annotationsAsTags: map[string]string{
"gitcommit": "+gitcommit",
"component": "component",
},
labelsAsTags: map[string]string{
"ownerteam": "team",
"tier": "tier",
},
pod: workloadmeta.KubernetesPod{
EntityID: podEntityID,
EntityMeta: workloadmeta.EntityMeta{
Name: podName,
Namespace: podNamespace,
Annotations: map[string]string{
// Annotations as tags
"GitCommit": "foobar",
"ignoreme": "ignore",
"component": "agent",
// Custom tags from map
"ad.datadoghq.com/tags": `{"pod_template_version":"1.0.0"}`,
},
Labels: map[string]string{
// Labels as tags
"OwnerTeam": "container-integrations",
"tier": "node",
"pod-template-hash": "490794276",
// Standard tags
"tags.datadoghq.com/env": env,
"tags.datadoghq.com/service": svc,
"tags.datadoghq.com/version": version,
// K8s recommended tags
"app.kubernetes.io/name": svc,
"app.kubernetes.io/instance": podName,
"app.kubernetes.io/version": version,
"app.kubernetes.io/component": "agent",
"app.kubernetes.io/part-of": "datadog",
"app.kubernetes.io/managed-by": "helm",
},
},
// Owner tags
Owners: []workloadmeta.KubernetesPodOwner{
{
Kind: kubernetes.DeploymentKind,
Name: svc,
},
},
// PVC tags
PersistentVolumeClaimNames: []string{"pvc-0"},
// Phase tags
Phase: "Running",
// Container tags
Containers: []string{},
},
expected: []*TagInfo{
{
Source: workloadmetaCollectorName,
Entity: podTaggerEntityID,
HighCardTags: []string{
"gitcommit:foobar",
},
OrchestratorCardTags: []string{
fmt.Sprintf("pod_name:%s", podName),
"kube_ownerref_name:datadog-agent",
},
LowCardTags: append([]string{
fmt.Sprintf("kube_app_instance:%s", podName),
fmt.Sprintf("kube_app_name:%s", svc),
fmt.Sprintf("kube_app_version:%s", version),
fmt.Sprintf("kube_deployment:%s", svc),
fmt.Sprintf("kube_namespace:%s", podNamespace),
"component:agent",
"kube_app_component:agent",
"kube_app_managed_by:helm",
"kube_app_part_of:datadog",
"kube_ownerref_kind:deployment",
"pod_phase:running",
"pod_template_version:1.0.0",
"team:container-integrations",
"tier:node",
}, standardTags...),
StandardTags: standardTags,
},
},
},
{
name: "pod with fully formed container, standard tags from env",
pod: workloadmeta.KubernetesPod{
EntityID: podEntityID,
EntityMeta: workloadmeta.EntityMeta{
Name: podName,
Namespace: podNamespace,
},
Containers: []string{fullyFleshedContainerID},
},
expected: []*TagInfo{
{
Source: workloadmetaCollectorName,
Entity: podTaggerEntityID,
HighCardTags: []string{},
OrchestratorCardTags: []string{
fmt.Sprintf("pod_name:%s", podName),
},
LowCardTags: append([]string{
fmt.Sprintf("kube_namespace:%s", podNamespace),
}),
StandardTags: []string{},
},
{
Source: workloadmetaCollectorName,
Entity: fullyFleshedContainerTaggerEntityID,
HighCardTags: []string{
fmt.Sprintf("container_id:%s", fullyFleshedContainerID),
fmt.Sprintf("display_container_name:%s_%s", containerName, podName),
},
OrchestratorCardTags: []string{
fmt.Sprintf("pod_name:%s", podName),
},
LowCardTags: append([]string{
fmt.Sprintf("kube_namespace:%s", podNamespace),
fmt.Sprintf("kube_container_name:%s", containerName),
"image_id:datadog/agent@sha256:a63d3f66fb2f69d955d4f2ca0b229385537a77872ffc04290acae65aed5317d2",
"image_name:datadog/agent",
"image_tag:latest",
"short_image:agent",
}, standardTags...),
StandardTags: standardTags,
},
},
},
{
name: "pod with container, standard tags from labels",
pod: workloadmeta.KubernetesPod{
EntityID: podEntityID,
EntityMeta: workloadmeta.EntityMeta{
Name: podName,
Namespace: podNamespace,
Labels: map[string]string{
"tags.datadoghq.com/agent.env": env,
"tags.datadoghq.com/agent.service": svc,
"tags.datadoghq.com/agent.version": version,
},
},
Containers: []string{noEnvContainerID},
},
expected: []*TagInfo{
{
Source: workloadmetaCollectorName,
Entity: podTaggerEntityID,
HighCardTags: []string{},
OrchestratorCardTags: []string{
fmt.Sprintf("pod_name:%s", podName),
},
LowCardTags: append([]string{
fmt.Sprintf("kube_namespace:%s", podNamespace),
}),
StandardTags: []string{},
},
{
Source: workloadmetaCollectorName,
Entity: noEnvContainerTaggerEntityID,
HighCardTags: []string{
fmt.Sprintf("container_id:%s", noEnvContainerID),
fmt.Sprintf("display_container_name:%s_%s", containerName, podName),
},
OrchestratorCardTags: []string{
fmt.Sprintf("pod_name:%s", podName),
},
LowCardTags: append([]string{
fmt.Sprintf("kube_namespace:%s", podNamespace),
fmt.Sprintf("kube_container_name:%s", containerName),
}, standardTags...),
StandardTags: standardTags,
},
},
},
{
name: "pod from openshift deployment",
pod: workloadmeta.KubernetesPod{
EntityID: podEntityID,
EntityMeta: workloadmeta.EntityMeta{
Name: podName,
Namespace: podNamespace,
Annotations: map[string]string{
"openshift.io/deployment-config.latest-version": "1",
"openshift.io/deployment-config.name": "gitlab-ce",
"openshift.io/deployment.name": "gitlab-ce-1",
},
},
},
expected: []*TagInfo{
{
Source: workloadmetaCollectorName,
Entity: podTaggerEntityID,
HighCardTags: []string{},
OrchestratorCardTags: []string{
fmt.Sprintf("pod_name:%s", podName),
"oshift_deployment:gitlab-ce-1",
},
LowCardTags: append([]string{
fmt.Sprintf("kube_namespace:%s", podNamespace),
"oshift_deployment_config:gitlab-ce",
}),
StandardTags: []string{},
},
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
collector := &WorkloadMetaCollector{
store: store,
}
collector.init(tt.labelsAsTags, tt.annotationsAsTags)
actual := collector.handleKubePod(workloadmeta.Event{
Type: workloadmeta.EventTypeSet,
Entity: tt.pod,
})
assertTagInfoListEqual(t, tt.expected, actual)
})
}
}
func TestParseJSONValue(t *testing.T) {
tests := []struct {
name string
value string
want map[string][]string
wantErr bool
}{
{
name: "empty json",
value: ``,
want: nil,
wantErr: true,
},
{
name: "invalid json",
value: `{key}`,
want: nil,
wantErr: true,
},
{
name: "invalid value",
value: `{"key1": "val1", "key2": 0}`,
want: map[string][]string{
"key1": {"val1"},
},
wantErr: false,
},
{
name: "strings and arrays",
value: `{"key1": "val1", "key2": ["val2"]}`,
want: map[string][]string{
"key1": {"val1"},
"key2": {"val2"},
},
wantErr: false,
},
{
name: "arrays only",
value: `{"key1": ["val1", "val11"], "key2": ["val2", "val22"]}`,
want: map[string][]string{
"key1": {"val1", "val11"},
"key2": {"val2", "val22"},
},
wantErr: false,
},
{
name: "strings only",
value: `{"key1": "val1", "key2": "val2"}`,
want: map[string][]string{
"key1": {"val1"},
"key2": {"val2"},
},
wantErr: false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got, err := parseJSONValue(tt.value)
if (err != nil) != tt.wantErr {
t.Errorf("parseJSONValue() error = %v, wantErr %v", err, tt.wantErr)
return
}
assert.Len(t, got, len(tt.want))
for k, v := range tt.want {
assert.ElementsMatch(t, v, got[k])
}
})
}
}
| TestHandleKubePod |
dimension_test.go | // Copyright 2021 Splunk, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package smartagentreceiver
import (
"testing"
metadata "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/experimentalmetricmetadata"
"github.com/signalfx/signalfx-agent/pkg/monitors/types"
"github.com/stretchr/testify/assert"
)
func TestDimensionToMetadataUpdate(t *testing.T) |
func TestPersistingPropertyAndTagWithSameName(t *testing.T) {
dimension := types.Dimension{
Properties: map[string]string{
"shared_name": "property_value",
},
Tags: map[string]bool{
"shared_name": true,
},
}
metadataUpdate := dimensionToMetadataUpdate(dimension)
expectedMetadataToUpdate := map[string]string{
"shared_name": "property_value",
}
assert.Equal(t, expectedMetadataToUpdate, metadataUpdate.MetadataToUpdate)
expectedMetadataToAdd := map[string]string{
"shared_name": "",
}
assert.Equal(t, expectedMetadataToAdd, metadataUpdate.MetadataToAdd)
assert.Empty(t, metadataUpdate.MetadataToRemove)
}
func TestPersistingPropertyAndRemovedTagWithSameName(t *testing.T) {
dimension := types.Dimension{
Properties: map[string]string{
"shared_name": "property_value",
},
Tags: map[string]bool{
"shared_name": false,
},
}
metadataUpdate := dimensionToMetadataUpdate(dimension)
expectedMetadataToUpdate := map[string]string{
"shared_name": "property_value",
}
assert.Equal(t, expectedMetadataToUpdate, metadataUpdate.MetadataToUpdate)
assert.Empty(t, metadataUpdate.MetadataToAdd)
expectedMetadataToRemove := map[string]string{
"shared_name": "",
}
assert.Equal(t, expectedMetadataToRemove, metadataUpdate.MetadataToRemove)
}
func TestRemovedPropertyAndPersistingTagWithSameName(t *testing.T) {
dimension := types.Dimension{
Properties: map[string]string{
"shared_name": "",
},
Tags: map[string]bool{
"shared_name": true,
},
}
metadataUpdate := dimensionToMetadataUpdate(dimension)
assert.Empty(t, metadataUpdate.MetadataToUpdate)
expectedMetadataToAdd := map[string]string{
"shared_name": "",
}
assert.Equal(t, expectedMetadataToAdd, metadataUpdate.MetadataToAdd)
expectedMetadataToRemove := map[string]string{
"shared_name": "sf_delete_this_property",
}
assert.Equal(t, expectedMetadataToRemove, metadataUpdate.MetadataToRemove)
}
func TestRemovedPropertyAndTagWithSameName(t *testing.T) {
t.Skipf("Not valid until use case is supported in SFx exporter")
dimension := types.Dimension{
Properties: map[string]string{
"shared_name": "",
},
Tags: map[string]bool{
"shared_name": false,
},
}
metadataUpdate := dimensionToMetadataUpdate(dimension)
assert.Empty(t, metadataUpdate.MetadataToAdd)
assert.Empty(t, metadataUpdate.MetadataToUpdate)
expectedMetadataToRemove := map[string]string{
"shared_name": "sf_delete_this_property",
}
assert.Equal(t, expectedMetadataToRemove, metadataUpdate.MetadataToRemove)
}
| {
dimension := types.Dimension{
Name: "my_dimension",
Value: "my_dimension_value",
Properties: map[string]string{
"this_property_should_be_updated": "with_this_property_value",
"this_property_should_be_removed": "",
},
Tags: map[string]bool{
"this_tag_should_be_added": true,
"this_tag_should_be_removed": false,
},
}
metadataUpdate := dimensionToMetadataUpdate(dimension)
assert.Equal(t, "my_dimension", metadataUpdate.ResourceIDKey)
assert.Equal(t, metadata.ResourceID("my_dimension_value"), metadataUpdate.ResourceID)
expectedMetadataToUpdate := map[string]string{
"this_property_should_be_updated": "with_this_property_value",
}
assert.Equal(t, expectedMetadataToUpdate, metadataUpdate.MetadataToUpdate)
expectedMetadataToAdd := map[string]string{
"this_tag_should_be_added": "",
}
assert.Equal(t, expectedMetadataToAdd, metadataUpdate.MetadataToAdd)
expectedMetadataToRemove := map[string]string{
"this_property_should_be_removed": "sf_delete_this_property",
"this_tag_should_be_removed": "",
}
assert.Equal(t, expectedMetadataToRemove, metadataUpdate.MetadataToRemove)
} |
OCLFullSplit.py | # -*- coding: utf-8 -*-
#
# Project: Azimuthal integration
# https://github.com/silx-kit/pyFAI
#
#
# Copyright (C) 2014-2018 European Synchrotron Radiation Facility, Grenoble, France
#
# Principal author: Jérôme Kieffer ([email protected])
# Giannis Ashiotis
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
__authors__ = ["Jérôme Kieffer", "Giannis Ashiotis"]
__license__ = "MIT"
__date__ = "18/02/2020"
__copyright__ = "2014, ESRF, Grenoble"
__contact__ = "[email protected]"
import os
import logging
import threading
import numpy
from . import ocl, pyopencl
from ..ext.splitBBoxLUT import HistoBBox1d
if pyopencl:
mf = pyopencl.mem_flags
else:
raise ImportError("pyopencl is not installed")
from ..utils import crc32, get_cl_file
logger = logging.getLogger(__name__)
class OCLFullSplit1d(object):
def __init__(self,
pos,
bins=100,
pos0Range=None,
pos1Range=None,
mask=None,
mask_checksum=None,
allow_pos0_neg=False,
unit="undefined",
workgroup_size=256,
devicetype="all",
platformid=None,
deviceid=None,
profile=False):
self.bins = bins
self.lut_size = 0
self.allow_pos0_neg = allow_pos0_neg
if len(pos.shape) == 3:
assert pos.shape[1] == 4
assert pos.shape[2] == 2
elif len(pos.shape) == 4:
assert pos.shape[2] == 4
assert pos.shape[3] == 2
else:
raise ValueError("Pos array dimensions are wrong")
self.pos_size = pos.size
self.size = self.pos_size / 8
self.pos = numpy.ascontiguousarray(pos.ravel(), dtype=numpy.float32)
self.pos0Range = numpy.empty(2, dtype=numpy.float32)
self.pos1Range = numpy.empty(2, dtype=numpy.float32)
if (pos0Range is not None) and (len(pos0Range) == 2):
self.pos0Range[0] = min(pos0Range) # do it on GPU?
self.pos0Range[1] = max(pos0Range)
if (not self.allow_pos0_neg) and (self.pos0Range[0] < 0):
self.pos0Range[0] = 0.0
if self.pos0Range[1] < 0:
print("Warning: Invalid 0-dim range! Using the data derived range instead")
self.pos0Range[1] = 0.0
# self.pos0Range[0] = pos0Range[0]
# self.pos0Range[1] = pos0Range[1]
else:
self.pos0Range[0] = 0.0
self.pos0Range[1] = 0.0
if (pos1Range is not None) and (len(pos1Range) == 2):
self.pos1Range[0] = min(pos1Range) # do it on GPU?
self.pos1Range[1] = max(pos1Range)
# self.pos1Range[0] = pos1Range[0]
# self.pos1Range[1] = pos1Range[1]
else:
self.pos1Range[0] = 0.0
self.pos1Range[1] = 0.0
if mask is not None:
assert mask.size == self.size
self.check_mask = True
self.cmask = numpy.ascontiguousarray(mask.ravel(), dtype=numpy.int8)
if mask_checksum:
self.mask_checksum = mask_checksum
else:
self.mask_checksum = crc32(mask)
else:
self.check_mask = False
self.mask_checksum = None
self._sem = threading.Semaphore()
self.profile = profile
self._cl_kernel_args = {}
self._cl_mem = {}
self.events = []
self.workgroup_size = workgroup_size
if self.size < self.workgroup_size:
raise RuntimeError("Fatal error in workgroup size selection. Size (%d) must be >= workgroup size (%d)\n" % (self.size, self.workgroup_size))
if (platformid is None) and (deviceid is None):
platformid, deviceid = ocl.select_device(devicetype)
elif platformid is None:
platformid = 0
elif deviceid is None:
deviceid = 0
self.platform = ocl.platforms[platformid]
self.device = self.platform.devices[deviceid]
self.device_type = self.device.type
if (self.device_type == "CPU") and (self.platform.vendor == "Apple"):
logger.warning("This is a workaround for Apple's OpenCL on CPU: enforce BLOCK_SIZE=1")
self.workgroup_size = 1
try:
self._ctx = pyopencl.Context(devices=[pyopencl.get_platforms()[platformid].get_devices()[deviceid]])
if self.profile:
self._queue = pyopencl.CommandQueue(self._ctx, properties=pyopencl.command_queue_properties.PROFILING_ENABLE)
else:
self._queue = pyopencl.CommandQueue(self._ctx)
self._compile_kernels()
self._calc_boundaries()
self._calc_LUT()
except pyopencl.MemoryError as error:
raise MemoryError(error)
def _compile_kernels(self, kernel_file=None):
"""
Call the OpenCL compiler
:param kernel_file: path to the kernel file
"""
kernel_name = "ocl_lut.cl"
if kernel_file is None:
if os.path.isfile(kernel_name):
kernel_file = os.path.abspath(kernel_name)
else:
kernel_file = get_cl_file("pyfai:openCL/" + kernel_name)
else:
kernel_file = str(kernel_file)
kernel_src = open(kernel_file).read()
compile_options = "-D BINS=%i -D POS_SIZE=%i -D SIZE=%i -D WORKGROUP_SIZE=%i -D EPS=%e" % \
(self.bins, self.pos_size, self.size, self.workgroup_size, numpy.finfo(numpy.float32).eps)
logger.info("Compiling file %s with options %s", kernel_file, compile_options)
try:
self._program = pyopencl.Program(self._ctx, kernel_src).build(options=compile_options)
except pyopencl.MemoryError as error:
raise MemoryError(error)
def _calc_boundaries(self):
"""
Compute the pos0/pos1 min/max boundaries on the device using a two-stage reduction.
"""
# # # # # # # # Check for memory# # # # # # # #
size_of_float = numpy.dtype(numpy.float32).itemsize
ualloc = (self.pos_size * size_of_float)
ualloc += (self.workgroup_size * 4 * size_of_float)
ualloc += (4 * size_of_float)
memory = self.device.memory
if ualloc >= memory:
raise MemoryError("Fatal error in _allocate_buffers. Not enough device memory for buffers (%lu requested, %lu available)" % (ualloc, memory))
# # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # allocate memory # # # # # # # #
try:
# No returned event for profiling
# self._cl_mem["pos"] = pyopencl.array.to_device(self._queue, self.pos)
# self._cl_mem["preresult"] = pyopencl.array.empty(self._queue, (4*self.workgroup_size,), dtype=numpy.float32)
# self._cl_mem["minmax"] = pyopencl.array.empty(self._queue, (4,), dtype=numpy.float32)
self._cl_mem["pos"] = pyopencl.Buffer(self._ctx, mf.READ_ONLY, size_of_float * self.pos_size)
self._cl_mem["preresult"] = pyopencl.Buffer(self._ctx, mf.READ_WRITE, size_of_float * 4 * self.workgroup_size)
self._cl_mem["minmax"] = pyopencl.Buffer(self._ctx, mf.READ_WRITE, size_of_float * 4)
except pyopencl.MemoryError as error:
self._free_device_memory()
raise MemoryError(error)
# # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # move data # # # # # # # # # #
with self._sem:
copy_pos = pyopencl.enqueue_copy(self._queue, self._cl_mem["pos"], self.pos)
self.events += [("copy pos", copy_pos)]
# # # # # # # # set arguments # # # # # # # # #
self._cl_kernel_args["reduce_minmax_1"] = [self._cl_mem["pos"], self._cl_mem["preresult"]]
self._cl_kernel_args["reduce_minmax_2"] = [self._cl_mem["preresult"], self._cl_mem["minmax"]]
# # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # do the minmax reduction # # # # # #
with self._sem:
reduce_minmax_1 = self._program.reduce_minmax_1(self._queue, (self.workgroup_size * self.workgroup_size,), (self.workgroup_size,), *self._cl_kernel_args["reduce_minmax_1"])
self.events += [("reduce_minmax_1", reduce_minmax_1)]
reduce_minmax_2 = self._program.reduce_minmax_2(self._queue, (self.workgroup_size,), (self.workgroup_size,), *self._cl_kernel_args["reduce_minmax_2"])
self.events += [("reduce_minmax_2", reduce_minmax_2)]
# # # # # # # # # # # # # # # # # # # # # # # #
# # # # # release the redundant data # # # # #
self._cl_mem["preresult"].release()
self._cl_mem.pop("preresult")
# # # # # # # # # # # # # # # # # # # # # # # #
# check memory of d_pos + d_preresult + d_minmax
# load d_pos
# allocate d_preresult
# allocate d_minmax
# run reduce1
# run reduce2
# save reference to d_minMax
# free d_preresult
def _calc_LUT(self):
"""
first need to call lut_1 and lut_2 to find the size of the LUT, and then lut_3 to create it
"""
# # # # # # # # Check for memory# # # # # # # #
size_of_float = numpy.dtype(numpy.float32).itemsize
size_of_int = numpy.dtype(numpy.int32).itemsize
ualloc = (self.pos_size * size_of_float) # pos
ualloc += (4 * size_of_float) # minmax
ualloc += (2 * size_of_float) * 2 # pos0Range, pos1Range
ualloc += (self.bins * size_of_int) # outMax
ualloc += (1 * size_of_int) # lutsize
ualloc += ((self.bins + 1) * size_of_int) # idx_ptr
memory = self.device.memory
if ualloc >= memory:
raise MemoryError("Fatal error in _allocate_buffers. Not enough device memory for buffers (%lu requested, %lu available)" % (ualloc, memory))
# # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # allocate memory # # # # # # # #
try:
# self._cl_mem["pos0Range"] = pyopencl.Buffer(self._ctx, mf.READ_ONLY, size_of_float * 2)
# self._cl_mem["pos1Range"] = pyopencl.Buffer(self._ctx, mf.READ_ONLY, size_of_float * 2)
self._cl_mem["outMax"] = pyopencl.Buffer(self._ctx, mf.READ_WRITE, size_of_float * self.bins)
self._cl_mem["lutsize"] = pyopencl.Buffer(self._ctx, mf.READ_WRITE, size_of_float * 1)
self._cl_mem["idx_ptr"] = pyopencl.Buffer(self._ctx, mf.READ_WRITE, size_of_float * (self.bins + 1))
except pyopencl.MemoryError as error:
self._free_device_memory()
raise MemoryError(error)
# # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # move data # # # # # # # # # #
# with self._sem:
# copy_pos0Range = pyopencl.enqueue_copy(self._queue, self._cl_mem["pos0Range"], self.pos0Range)
# self.events += [("copy pos0Range", copy_pos0Range)]
# copy_pos1Range = pyopencl.enqueue_copy(self._queue, self._cl_mem["pos1Range"], self.pos1Range)
# self.events += [("copy pos1Range", copy_pos1Range)]
# # # # # # # # set arguments # # # # # # # # #
self._cl_kernel_args["memset_outMax"] = [self._cl_mem["outMax"]]
self._cl_kernel_args["lut_1"] = [self._cl_mem["pos"], self._cl_mem["minmax"], self.pos0Range.data, self.pos1Range.data, self._cl_mem["outMax"]]
self._cl_kernel_args["lut_2"] = [self._cl_mem["outMax"], self._cl_mem["idx_ptr"], self._cl_mem["lutsize"]]
# # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # start the LUT creation # # # # # #
memset_size = (self.bins + self.workgroup_size - 1) & ~(self.workgroup_size - 1),
global_size = (self.size + self.workgroup_size - 1) & ~(self.workgroup_size - 1),
with self._sem:
memset_outMax = self._program.memset_outMax(self._queue, memset_size, (self.workgroup_size,), *self._cl_kernel_args["memset_outMax"])
self.events += [("memset_outMax", memset_outMax)]
lut_1 = self._program.lut_1(self._queue, global_size, (self.workgroup_size,), *self._cl_kernel_args["lut_1"])
self.events += [("lut_1", lut_1)]
lut_2 = self._program.lut_2(self._queue, (1,), (1,), *self._cl_kernel_args["lut_2"])
self.events += [("lut_2", lut_2)]
# # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # get the lutsize # # # # # # # #
self.lutsize = numpy.ndarray(1, dtype=numpy.int32)
get_lutsize = pyopencl.enqueue_copy(self._queue, self.lutsize, self._cl_mem["lutsize"])
self.events += [("get_lutsize", get_lutsize)]
# # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # check memory # # # # # # # #
ualloc += (self.lutsize * size_of_int) # indices
ualloc += (self.lutsize * size_of_float) # data
if ualloc >= memory:
raise MemoryError("Fatal error in _allocate_buffers. Not enough device memory for buffers (%lu requested, %lu available)" % (ualloc, memory))
# # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # allocate memory # # # # # # # #
try:
self._cl_mem["indices"] = pyopencl.Buffer(self._ctx, mf.READ_WRITE, size_of_int * self.lutsize[0])
self._cl_mem["data"] = pyopencl.Buffer(self._ctx, mf.READ_WRITE, size_of_float * self.lutsize[0])
except pyopencl.MemoryError as error:
self._free_device_memory()
raise MemoryError(error)
# # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # set arguments # # # # # # # # #
self._cl_kernel_args["lut_3"] = [self._cl_mem["pos"], self._cl_mem["minmax"], self.pos0Range.data, self.pos1Range.data, self._cl_mem["outMax"], self._cl_mem["idx_ptr"], self._cl_mem["indices"], self._cl_mem["data"]]
# # # # # # # # # # # # # # # # # # # # # # # #
# # # # # finish the LUT creation # # # # #
with self._sem:
memset_outMax = self._program.memset_outMax(self._queue, memset_size, (self.workgroup_size,), *self._cl_kernel_args["memset_outMax"])
self.events += [("memset_outMax", memset_outMax)]
lut_3 = self._program.lut_3(self._queue, global_size, (self.workgroup_size,), *self._cl_kernel_args["lut_3"])
self.events += [("lut_3", lut_3)]
# # # # # # # # # # # # # # # # # # # # # # # #
# # # # # release the redundant data # # # # #
self._cl_mem["pos"].release()
self._cl_mem.pop("pos")
self._cl_mem["minmax"].release()
self._cl_mem.pop("minmax")
# self._cl_mem["pos0Range"].release()
# self._cl_mem.pop("pos0Range")
# self._cl_mem["pos1Range"].release()
# self._cl_mem.pop("pos1Range")
self._cl_mem["outMax"].release()
self._cl_mem.pop("outMax") |
# check memory of d_pos + d_minmax + d_outMax + d_lutsize
# allocate d_outMax
# allocate d_lutsize
# memset d_outMax
# run lut1
# run lut2
# save d_lutsize
# memset d_outMax
# allocate d_data
# allocate d_indices
# run lut3
# free d_pos
# free d_minMax
# free d_lutsize
# run lut4
# free d_outMax
def _free_device_memory(self):
"""
free all memory allocated on the device
"""
for buffer_name in list(self._cl_mem.keys())[:]:
buf = self._cl_mem.pop(buffer_name)
if buf is not None:
try:
buf.release()
except pyopencl.LogicError:
logger.error("Error while freeing buffer %s", buffer_name)
def get_platform(self):
pass
def get_queue(self):
pass | self._cl_mem["lutsize"].release()
self._cl_mem.pop("lutsize")
# # # # # # # # # # # # # # # # # # # # # # # # |
scroll.d.ts | import { ElementRef } from '@angular/core';
export declare class | {
private _elementRef;
_scrollX: boolean;
_scrollY: boolean;
_zoom: boolean;
_maxZoom: number;
scrollX: any;
scrollY: any;
zoom: any;
maxZoom: any;
maxScale: number;
zoomDuration: number;
scrollElement: HTMLElement;
constructor(_elementRef: ElementRef);
ngOnInit(): void;
addScrollEventListener(handler: any): () => void;
}
| Scroll |
test_object_value.py | import pytest
@pytest.mark.usefixtures("smart_setup")
class TestObjectValue:
def test_get_sheet_object_value(self, smart_setup):
smart = smart_setup['smart']
sheet = smart.Sheets.get_sheet(smart_setup['sheet'].id, include='objectValue')
assert isinstance(sheet.rows[0].cells[0].object_value, smart.models.StringObjectValue)
assert isinstance(sheet, smart.models.Sheet)
def test_predecessors(self, smart_setup):
smart = smart_setup['smart']
templates = smart.Templates.list_public_templates(include_all=True)
for template in templates.data:
if template.name == 'Basic Project with Gantt & Dependencies':
break
sheet = smart.models.Sheet({
'name': 'example_project_python_sdk' + smart_setup['now'],
'fromId': template.id
})
action = smart.Home.create_sheet_from_template(sheet)
sheet = action.result
assert action.message == 'SUCCESS'
sheet = smart.Sheets.get_sheet(sheet.id)
# add 'Task1'
row = smart.models.Row()
row.to_bottom = True
for col in sheet.columns:
if col.primary:
row.cells.append({
'column_id': col.id,
'value': 'Task1'
})
break
action = smart.Sheets.add_rows(sheet.id, [row])
task1_row = action.result[0]
assert isinstance(task1_row, smart.models.row.Row)
assert action.request_response.status_code == 200
# add 'Task2' with 'Task1' predecessor
p1 = smart.models.Predecessor()
p1.type = 'FS'
p1.row_id = task1_row.id
predecessor_list = smart.models.PredecessorList()
predecessor_list.predecessors = [p1]
row = smart.models.Row()
row.to_bottom = True
for col in sheet.columns:
if col.primary:
row.cells.append({
'column_id': col.id,
'value': 'Task2'
})
if col.type == 'PREDECESSOR':
row.cells.append({
'column_id': col.id,
'object_value': predecessor_list
})
action = smart.Sheets.add_rows(sheet.id, [row])
task2_row = action.result[0]
assert isinstance(task2_row, smart.models.row.Row)
assert action.request_response.status_code == 200
# add 'Task3' with 'Task1','Task2' predecessors
p1 = smart.models.Predecessor()
p1.type = 'FS'
p1.row_id = task1_row.id
p2 = smart.models.Predecessor()
p2.type = 'FS'
p2.row_id = task2_row.id
predecessor_list = smart.models.PredecessorList()
predecessor_list.predecessors = [p1, p2]
row = smart.models.Row()
row.to_bottom = True
for col in sheet.columns:
if col.primary:
row.cells.append({
'column_id': col.id,
'value': 'Task3'
})
if col.type == 'PREDECESSOR':
row.cells.append({
'column_id': col.id,
'object_value': predecessor_list
})
action = smart.Sheets.add_rows(sheet.id, [row])
task3_row = action.result[0]
assert isinstance(task3_row, smart.models.row.Row)
assert action.request_response.status_code == 200
# clear the predecessor list from task 3
row = smart.models.Row()
row.id = task3_row.id
for col in sheet.columns:
if col.type == 'PREDECESSOR':
row.cells.append({
'column_id': col.id,
'value': smart.models.ExplicitNull()
})
break
action = smart.Sheets.update_rows(sheet.id, [row])
assert action.request_response.status_code == 200
for cell in action.data[0].cells:
if cell.column_id == col.id:
break;
assert cell.object_value is None |
# clean up
action = smart.Sheets.delete_sheet(sheet.id)
assert action.message == 'SUCCESS' | |
mod.rs | //! HTTP requests.
use std::{
collections::HashMap,
io::{self, BufRead, BufReader, Read, Write},
str::Utf8Error,
};
use url::{ParseError, Url};
use crate::body::Body;
pub mod builder;
/// The maximum number of headers which Puck will parse.
pub const MAX_HEADERS: usize = 20;
/// The new line delimiter.
pub const NEW_LINE: u8 = b'\n';
/// A HTTP request.
#[derive(Debug)]
pub struct Request {
pub(crate) headers: HashMap<String, String>,
pub(crate) method: Method,
pub(crate) body: Body,
pub(crate) url: Url,
}
impl Request {
/// Returns a builder to produce a new `Request` with. This method panics if the URL is not
/// valid.
pub fn build(url: impl AsRef<str>) -> builder::RequestBuilder {
builder::RequestBuilder::new(url)
}
/// Try to construct a builder from the provided URL, and return an error if the URL is invalid.
pub fn try_build(url: impl AsRef<str>) -> Result<builder::RequestBuilder, ParseError> {
builder::RequestBuilder::try_new(url)
}
/// Parse a `Request` from the provided stream (which must implement `Read` and be valid for
/// the `'static` lifetime.) This function will block until the `Request` has been parsed.
///
/// Note that if the request is empty, this will not return an error – instead it will return
/// `Ok(None)`.
pub fn parse(stream: impl Read + 'static) -> Result<Option<Self>, RequestParseError> {
let mut headers = [httparse::EMPTY_HEADER; MAX_HEADERS];
let mut req = httparse::Request::new(&mut headers);
let mut reader = BufReader::with_capacity(10000, stream);
let mut buf = Vec::new();
loop {
let bytes_read = match reader.read_until(NEW_LINE, &mut buf) {
Ok(t) => t,
Err(e) => {
return Err(From::from(e));
}
};
if bytes_read == 0 {
return Ok(None);
}
// todo – drop requests for headers which are too large
let idx = buf.len() - 1;
if idx >= 3 && &buf[idx - 3..=idx] == b"\r\n\r\n" {
break;
}
}
let _ = req.parse(&buf)?;
let method = Method::new_from_str(req.method.ok_or(RequestParseError::MissingMethod)?);
let headers = {
let mut map = HashMap::new();
for header in req.headers.iter() {
map.insert(
header.name.to_string(),
std::str::from_utf8(header.value)?.to_string(),
);
}
map
};
let url =
if let Some((_, host)) = headers.iter().find(|(k, _)| k.eq_ignore_ascii_case("host")) {
let url = req.path.ok_or(RequestParseError::InvalidUrl)?;
if url.starts_with("http://") || url.starts_with("https://") {
Url::parse(url)
} else if url.starts_with('/') {
Url::parse(&format!("http://{}{}", host, url))
} else if req.method.unwrap().eq_ignore_ascii_case("connect") {
Url::parse(&format!("http://{}/", host))
} else {
return Err(RequestParseError::InvalidUrl);
}
.map_err(|_| RequestParseError::InvalidUrl)?
} else {
return Err(RequestParseError::MissingHeader("Host".to_string()));
};
let body = Body::from_reader(
reader,
headers
.iter()
.find(|(key, _)| key.eq_ignore_ascii_case("content-length"))
.and_then(|(_, len)| len.as_str().parse::<usize>().ok()),
);
Ok(Some(Self {
headers,
method,
body,
url,
}))
}
/// Write this `Request` into the provided writer. Note that this will modify the `Request`
/// in-place; specifically, it will empty the contents of this `Request`'s body.
pub fn write(&mut self, write: &mut impl Write) -> io::Result<()> {
self.method.write(write)?;
write!(write, " {} ", self.url.path())?;
write!(write, "HTTP/1.1\r\n")?;
for (key, value) in &self.headers {
write!(write, "{}: {}\r\n", key, value)?;
}
write!(write, "\r\n")?;
std::io::copy(&mut self.body, write).map(drop)
}
/// Get a reference to the request's headers.
pub fn headers(&self) -> &HashMap<String, String> {
&self.headers
}
/// Get a reference to the request's method.
pub fn method(&self) -> &Method {
&self.method
}
/// Get a reference to the request's body.
pub fn body(&self) -> &Body {
&self.body
}
/// Replace the current `Body` with the supplied `Body`, returning the existing `Body`.
pub fn replace_body(&mut self, body: impl Into<Body>) -> Body {
let body = std::mem::replace(&mut self.body, body.into());
self.copy_content_type_from_body();
body
}
/// Take the `Body` from this request, replacing the `Request`'s body with an empty `Body`.
pub fn take_b | self) -> Body {
self.replace_body(Body::empty())
}
fn copy_content_type_from_body(&mut self) {
self.headers
.insert("Content-Type".into(), self.body.mime.to_string());
}
/// Get a reference to the request's url.
pub fn url(&self) -> &Url {
&self.url
}
}
#[derive(thiserror::Error, Debug)]
/// An error encountered when trying to parse a request.
pub enum RequestParseError {
/// Couldn't parse the request in question.
#[error("could not parse")]
CouldNotParse(httparse::Error),
/// A `Utf8Error` was encountered when parsing the request.
#[error("utf8 error")]
Utf8Error(Utf8Error),
/// An `IoError` was encountered when parsing the request.
#[error("io error")]
IoError(io::Error),
/// The URL supplied was not valid.
#[error("the supplied url was invalid")]
InvalidUrl,
/// A header is missing.
#[error("the `{0}` header is missing")]
MissingHeader(String),
/// The request method is missing.
#[error("missing method")]
MissingMethod,
}
impl From<std::io::Error> for RequestParseError {
fn from(e: std::io::Error) -> Self {
Self::IoError(e)
}
}
impl From<httparse::Error> for RequestParseError {
fn from(e: httparse::Error) -> Self {
Self::CouldNotParse(e)
}
}
impl From<Utf8Error> for RequestParseError {
fn from(e: Utf8Error) -> Self {
Self::Utf8Error(e)
}
}
#[derive(PartialEq, Eq, Clone, Debug)]
/// The HTTP method (e.g. "GET" or "POST")
#[allow(missing_docs)]
pub enum Method {
Get,
Post,
Head,
OtherMethod(String),
}
impl Method {
/// Create a new method from the provided string.
pub fn new_from_str(str: &str) -> Self {
match str.to_ascii_lowercase().as_str() {
"get" => Self::Get,
"post" => Self::Post,
_ => Self::OtherMethod(str.to_string()),
}
}
/// Write the given message to a TCP stream.
pub fn write(&self, write: &mut impl Write) -> io::Result<()> {
let to_write = match self {
Method::Get => "GET",
Method::Post => "POST",
Method::Head => "HEAD /",
Method::OtherMethod(name) => name,
};
write!(write, "{}", to_write)
}
}
| ody(&mut |
edit.rs | //! This module contains functions for editing syntax trees. As the trees are
//! immutable, all function here return a fresh copy of the tree, instead of
//! doing an in-place modification.
use std::{iter, ops::RangeInclusive};
use arrayvec::ArrayVec;
use crate::{
algo,
ast::{
self,
make::{self, tokens},
AstNode, TypeBoundsOwner,
},
AstToken, Direction, InsertPosition, SmolStr, SyntaxElement, SyntaxKind,
SyntaxKind::{ATTR, COMMENT, WHITESPACE},
SyntaxNode, SyntaxToken, T,
};
use algo::{neighbor, SyntaxRewriter};
impl ast::BinExpr {
#[must_use]
pub fn replace_op(&self, op: SyntaxKind) -> Option<ast::BinExpr> {
let op_node: SyntaxElement = self.op_details()?.0.into();
let to_insert: Option<SyntaxElement> = Some(make::token(op).into());
Some(self.replace_children(single_node(op_node), to_insert))
}
}
impl ast::FnDef {
#[must_use]
pub fn with_body(&self, body: ast::Block) -> ast::FnDef {
let mut to_insert: ArrayVec<[SyntaxElement; 2]> = ArrayVec::new();
let old_body_or_semi: SyntaxElement = if let Some(old_body) = self.body() {
old_body.syntax().clone().into()
} else if let Some(semi) = self.semicolon_token() {
to_insert.push(make::tokens::single_space().into());
semi.into()
} else {
to_insert.push(make::tokens::single_space().into());
to_insert.push(body.syntax().clone().into());
return self.insert_children(InsertPosition::Last, to_insert);
};
to_insert.push(body.syntax().clone().into());
self.replace_children(single_node(old_body_or_semi), to_insert)
}
}
impl ast::ItemList {
#[must_use]
pub fn append_items(&self, items: impl Iterator<Item = ast::ImplItem>) -> ast::ItemList {
let mut res = self.clone();
if !self.syntax().text().contains_char('\n') {
res = res.make_multiline();
}
items.for_each(|it| res = res.append_item(it));
res
}
#[must_use]
pub fn append_item(&self, item: ast::ImplItem) -> ast::ItemList {
let (indent, position) = match self.impl_items().last() {
Some(it) => (
leading_indent(it.syntax()).unwrap_or_default().to_string(),
InsertPosition::After(it.syntax().clone().into()),
),
None => match self.l_curly() {
Some(it) => (
" ".to_string() + &leading_indent(self.syntax()).unwrap_or_default(),
InsertPosition::After(it),
),
None => return self.clone(),
},
};
let ws = tokens::WsBuilder::new(&format!("\n{}", indent));
let to_insert: ArrayVec<[SyntaxElement; 2]> =
[ws.ws().into(), item.syntax().clone().into()].into();
self.insert_children(position, to_insert)
}
fn l_curly(&self) -> Option<SyntaxElement> {
self.syntax().children_with_tokens().find(|it| it.kind() == T!['{'])
}
fn make_multiline(&self) -> ast::ItemList {
let l_curly = match self.syntax().children_with_tokens().find(|it| it.kind() == T!['{']) {
Some(it) => it,
None => return self.clone(),
};
let sibling = match l_curly.next_sibling_or_token() {
Some(it) => it,
None => return self.clone(),
};
let existing_ws = match sibling.as_token() {
None => None,
Some(tok) if tok.kind() != WHITESPACE => None,
Some(ws) => {
if ws.text().contains('\n') {
return self.clone();
}
Some(ws.clone())
}
};
let indent = leading_indent(self.syntax()).unwrap_or_default();
let ws = tokens::WsBuilder::new(&format!("\n{}", indent));
let to_insert = iter::once(ws.ws().into());
match existing_ws {
None => self.insert_children(InsertPosition::After(l_curly), to_insert),
Some(ws) => self.replace_children(single_node(ws), to_insert),
}
}
}
impl ast::RecordFieldList {
#[must_use]
pub fn append_field(&self, field: &ast::RecordField) -> ast::RecordFieldList {
self.insert_field(InsertPosition::Last, field)
}
#[must_use]
pub fn insert_field(
&self,
position: InsertPosition<&'_ ast::RecordField>,
field: &ast::RecordField,
) -> ast::RecordFieldList {
let is_multiline = self.syntax().text().contains_char('\n');
let ws;
let space = if is_multiline {
ws = tokens::WsBuilder::new(&format!(
"\n{} ",
leading_indent(self.syntax()).unwrap_or_default()
));
ws.ws()
} else {
tokens::single_space()
};
let mut to_insert: ArrayVec<[SyntaxElement; 4]> = ArrayVec::new();
to_insert.push(space.into());
to_insert.push(field.syntax().clone().into());
to_insert.push(make::token(T![,]).into());
macro_rules! after_l_curly {
() => {{
let anchor = match self.l_curly() {
Some(it) => it,
None => return self.clone(),
};
InsertPosition::After(anchor)
}};
}
macro_rules! after_field {
($anchor:expr) => {
if let Some(comma) = $anchor
.syntax()
.siblings_with_tokens(Direction::Next)
.find(|it| it.kind() == T![,])
{
InsertPosition::After(comma)
} else {
to_insert.insert(0, make::token(T![,]).into());
InsertPosition::After($anchor.syntax().clone().into())
}
};
};
let position = match position {
InsertPosition::First => after_l_curly!(),
InsertPosition::Last => {
if !is_multiline {
// don't insert comma before curly
to_insert.pop();
}
match self.fields().last() {
Some(it) => after_field!(it),
None => after_l_curly!(),
}
}
InsertPosition::Before(anchor) => {
InsertPosition::Before(anchor.syntax().clone().into())
}
InsertPosition::After(anchor) => after_field!(anchor),
};
self.insert_children(position, to_insert)
}
fn l_curly(&self) -> Option<SyntaxElement> {
self.syntax().children_with_tokens().find(|it| it.kind() == T!['{'])
}
}
impl ast::TypeParam {
#[must_use]
pub fn remove_bounds(&self) -> ast::TypeParam {
let colon = match self.colon_token() {
Some(it) => it,
None => return self.clone(),
};
let end = match self.type_bound_list() {
Some(it) => it.syntax().clone().into(),
None => colon.clone().into(),
};
self.replace_children(colon.into()..=end, iter::empty())
}
}
impl ast::Path {
#[must_use]
pub fn with_segment(&self, segment: ast::PathSegment) -> ast::Path {
if let Some(old) = self.segment() {
return self.replace_children(
single_node(old.syntax().clone()),
iter::once(segment.syntax().clone().into()),
);
}
self.clone()
}
}
impl ast::PathSegment {
#[must_use]
pub fn with_type_args(&self, type_args: ast::TypeArgList) -> ast::PathSegment {
self._with_type_args(type_args, false)
}
#[must_use]
pub fn with_turbo_fish(&self, type_args: ast::TypeArgList) -> ast::PathSegment {
self._with_type_args(type_args, true)
}
fn _with_type_args(&self, type_args: ast::TypeArgList, turbo: bool) -> ast::PathSegment {
if let Some(old) = self.type_arg_list() {
return self.replace_children(
single_node(old.syntax().clone()),
iter::once(type_args.syntax().clone().into()),
);
}
let mut to_insert: ArrayVec<[SyntaxElement; 2]> = ArrayVec::new();
if turbo {
to_insert.push(make::token(T![::]).into());
}
to_insert.push(type_args.syntax().clone().into());
self.insert_children(InsertPosition::Last, to_insert)
}
}
impl ast::UseItem {
#[must_use]
pub fn with_use_tree(&self, use_tree: ast::UseTree) -> ast::UseItem {
if let Some(old) = self.use_tree() {
return self.replace_descendants(iter::once((old, use_tree)));
}
self.clone()
}
pub fn remove(&self) -> SyntaxRewriter<'static> {
let mut res = SyntaxRewriter::default();
res.delete(self.syntax());
let next_ws = self
.syntax()
.next_sibling_or_token()
.and_then(|it| it.into_token())
.and_then(ast::Whitespace::cast);
if let Some(next_ws) = next_ws |
res
}
}
impl ast::UseTree {
#[must_use]
pub fn with_path(&self, path: ast::Path) -> ast::UseTree {
if let Some(old) = self.path() {
return self.replace_descendants(iter::once((old, path)));
}
self.clone()
}
#[must_use]
pub fn with_use_tree_list(&self, use_tree_list: ast::UseTreeList) -> ast::UseTree {
if let Some(old) = self.use_tree_list() {
return self.replace_descendants(iter::once((old, use_tree_list)));
}
self.clone()
}
#[must_use]
pub fn split_prefix(&self, prefix: &ast::Path) -> ast::UseTree {
let suffix = match split_path_prefix(&prefix) {
Some(it) => it,
None => return self.clone(),
};
let use_tree = make::use_tree(suffix.clone(), self.use_tree_list(), self.alias());
let nested = make::use_tree_list(iter::once(use_tree));
return make::use_tree(prefix.clone(), Some(nested), None);
fn split_path_prefix(prefix: &ast::Path) -> Option<ast::Path> {
let parent = prefix.parent_path()?;
let mut res = make::path_unqualified(parent.segment()?);
for p in iter::successors(parent.parent_path(), |it| it.parent_path()) {
res = make::path_qualified(res, p.segment()?);
}
Some(res)
}
}
pub fn remove(&self) -> SyntaxRewriter<'static> {
let mut res = SyntaxRewriter::default();
res.delete(self.syntax());
for &dir in [Direction::Next, Direction::Prev].iter() {
if let Some(nb) = neighbor(self, dir) {
self.syntax()
.siblings_with_tokens(dir)
.skip(1)
.take_while(|it| it.as_node() != Some(nb.syntax()))
.for_each(|el| res.delete(&el));
return res;
}
}
res
}
}
#[must_use]
pub fn remove_attrs_and_docs<N: ast::AttrsOwner>(node: &N) -> N {
N::cast(remove_attrs_and_docs_inner(node.syntax().clone())).unwrap()
}
fn remove_attrs_and_docs_inner(mut node: SyntaxNode) -> SyntaxNode {
while let Some(start) =
node.children_with_tokens().find(|it| it.kind() == ATTR || it.kind() == COMMENT)
{
let end = match &start.next_sibling_or_token() {
Some(el) if el.kind() == WHITESPACE => el.clone(),
Some(_) | None => start.clone(),
};
node = algo::replace_children(&node, start..=end, &mut iter::empty());
}
node
}
#[derive(Debug, Clone, Copy)]
pub struct IndentLevel(pub u8);
impl From<u8> for IndentLevel {
fn from(level: u8) -> IndentLevel {
IndentLevel(level)
}
}
impl IndentLevel {
pub fn from_node(node: &SyntaxNode) -> IndentLevel {
let first_token = match node.first_token() {
Some(it) => it,
None => return IndentLevel(0),
};
for ws in prev_tokens(first_token).filter_map(ast::Whitespace::cast) {
let text = ws.syntax().text();
if let Some(pos) = text.rfind('\n') {
let level = text[pos + 1..].chars().count() / 4;
return IndentLevel(level as u8);
}
}
IndentLevel(0)
}
pub fn increase_indent<N: AstNode>(self, node: N) -> N {
N::cast(self._increase_indent(node.syntax().clone())).unwrap()
}
fn _increase_indent(self, node: SyntaxNode) -> SyntaxNode {
let mut rewriter = SyntaxRewriter::default();
node.descendants_with_tokens()
.filter_map(|el| el.into_token())
.filter_map(ast::Whitespace::cast)
.filter(|ws| {
let text = ws.syntax().text();
text.contains('\n')
})
.for_each(|ws| {
let new_ws = make::tokens::whitespace(&format!(
"{}{:width$}",
ws.syntax().text(),
"",
width = self.0 as usize * 4
));
rewriter.replace(ws.syntax(), &new_ws)
});
rewriter.rewrite(&node)
}
pub fn decrease_indent<N: AstNode>(self, node: N) -> N {
N::cast(self._decrease_indent(node.syntax().clone())).unwrap()
}
fn _decrease_indent(self, node: SyntaxNode) -> SyntaxNode {
let mut rewriter = SyntaxRewriter::default();
node.descendants_with_tokens()
.filter_map(|el| el.into_token())
.filter_map(ast::Whitespace::cast)
.filter(|ws| {
let text = ws.syntax().text();
text.contains('\n')
})
.for_each(|ws| {
let new_ws = make::tokens::whitespace(
&ws.syntax().text().replace(&format!("\n{:1$}", "", self.0 as usize * 4), "\n"),
);
rewriter.replace(ws.syntax(), &new_ws)
});
rewriter.rewrite(&node)
}
}
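// Editorial note (not in the original source): one `IndentLevel` unit corresponds to
// four spaces, so `IndentLevel(2).increase_indent(..)` appends eight spaces after every
// newline-bearing whitespace token, and `decrease_indent` strips the same amount again.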
// FIXME: replace usages with IndentLevel above
fn leading_indent(node: &SyntaxNode) -> Option<SmolStr> {
for token in prev_tokens(node.first_token()?) {
if let Some(ws) = ast::Whitespace::cast(token.clone()) {
let ws_text = ws.text();
if let Some(pos) = ws_text.rfind('\n') {
return Some(ws_text[pos + 1..].into());
}
}
if token.text().contains('\n') {
break;
}
}
None
}
fn prev_tokens(token: SyntaxToken) -> impl Iterator<Item = SyntaxToken> {
iter::successors(Some(token), |token| token.prev_token())
}
pub trait AstNodeEdit: AstNode + Sized {
#[must_use]
fn insert_children(
&self,
position: InsertPosition<SyntaxElement>,
to_insert: impl IntoIterator<Item = SyntaxElement>,
) -> Self {
let new_syntax = algo::insert_children(self.syntax(), position, to_insert);
Self::cast(new_syntax).unwrap()
}
#[must_use]
fn replace_children(
&self,
to_replace: RangeInclusive<SyntaxElement>,
to_insert: impl IntoIterator<Item = SyntaxElement>,
) -> Self {
let new_syntax = algo::replace_children(self.syntax(), to_replace, to_insert);
Self::cast(new_syntax).unwrap()
}
#[must_use]
fn replace_descendants<D: AstNode>(
&self,
replacement_map: impl IntoIterator<Item = (D, D)>,
) -> Self {
let mut rewriter = SyntaxRewriter::default();
for (from, to) in replacement_map {
rewriter.replace(from.syntax(), to.syntax())
}
rewriter.rewrite_ast(self)
}
}
impl<N: AstNode> AstNodeEdit for N {}
fn single_node(element: impl Into<SyntaxElement>) -> RangeInclusive<SyntaxElement> {
let element = element.into();
element.clone()..=element
}
#[test]
fn test_increase_indent() {
let arm_list = {
let arm = make::match_arm(iter::once(make::placeholder_pat().into()), make::expr_unit());
make::match_arm_list(vec![arm.clone(), arm])
};
assert_eq!(
arm_list.syntax().to_string(),
"{
_ => (),
_ => (),
}"
);
let indented = IndentLevel(2).increase_indent(arm_list);
assert_eq!(
indented.syntax().to_string(),
"{
_ => (),
_ => (),
}"
);
}
| {
let ws_text = next_ws.syntax().text();
if ws_text.starts_with('\n') {
let rest = &ws_text[1..];
if rest.is_empty() {
res.delete(next_ws.syntax())
} else {
res.replace(next_ws.syntax(), &make::tokens::whitespace(rest));
}
}
} |
graphics.py | # -*- coding: utf-8 -*-
"""
The graphics header element definition.
"""
from .base import NITFElement, UserHeaderType, _IntegerDescriptor,\
_StringDescriptor, _StringEnumDescriptor, _NITFElementDescriptor
from .security import NITFSecurityTags
__classification__ = "UNCLASSIFIED"
__author__ = "Thomas McCullough"
class | (NITFElement):
"""
Graphics segment subheader - see standards document MIL-STD-2500C for more
information.
"""
_ordering = (
'SY', 'SID', 'SNAME', 'Security', 'ENCRYP', 'SFMT',
'SSTRUCT', 'SDLVL', 'SALVL', 'SLOC', 'SBND1',
'SCOLOR', 'SBND2', 'SRES2', 'UserHeader')
_lengths = {
'SY': 2, 'SID': 10, 'SNAME': 20, 'ENCRYP': 1,
'SFMT': 1, 'SSTRUCT': 13, 'SDLVL': 3, 'SALVL': 3,
'SLOC': 10, 'SBND1': 10, 'SCOLOR': 1, 'SBND2': 10,
'SRES2': 2}
SY = _StringEnumDescriptor(
'SY', True, 2, {'SY', }, default_value='SY',
docstring='File part type.') # type: str
SID = _StringDescriptor(
'SID', True, 10, default_value='',
docstring='Graphic Identifier. This field shall contain a valid alphanumeric identification code '
'associated with the graphic. The valid codes are determined by the application.') # type: str
SNAME = _StringDescriptor(
'SNAME', True, 20, default_value='',
docstring='Graphic name. This field shall contain an alphanumeric name for the graphic.') # type: str
Security = _NITFElementDescriptor(
'Security', True, NITFSecurityTags, default_args={},
docstring='The security tags.') # type: NITFSecurityTags
ENCRYP = _StringEnumDescriptor(
'ENCRYP', True, 1, {'0'}, default_value='0',
docstring='Encryption.') # type: str
SFMT = _StringDescriptor(
'SFMT', True, 1, default_value='C',
docstring='Graphic Type. This field shall contain a valid indicator of the '
'representation type of the graphic.') # type: str
SSTRUCT = _IntegerDescriptor(
'SSTRUCT', True, 13, default_value=0,
docstring='Reserved for Future Use.') # type: int
SDLVL = _IntegerDescriptor(
'SDLVL', True, 3, default_value=1,
docstring='Graphic Display Level. This field shall contain a valid value that indicates '
'the graphic display level of the graphic relative to other displayed file '
'components in a composite display. The valid values are :code:`1-999`. '
'The display level of each displayable file component (image or graphic) '
'within a file shall be unique.') # type: int
SALVL = _IntegerDescriptor(
'SALVL', True, 3, default_value=0,
docstring='Graphic Attachment Level. This field shall contain a valid value '
'that indicates the attachment level of the graphic. Valid values for '
'this field are 0 and the display level value of any other '
'image or graphic in the file.') # type: int
SLOC = _IntegerDescriptor(
'SLOC', True, 10, default_value=0,
docstring='Graphic Location. The graphics location is specified by providing the location '
                  'of the graphic’s origin point relative to the position (location) of the CCS, image, '
'or graphic to which it is attached. This field shall contain the graphic location '
'offset from the `ILOC` or `SLOC` value of the CCS, image, or graphic to which the graphic '
'is attached or from the origin of the CCS when the graphic is unattached (`SALVL = 0`). '
'A row and column value of :code:`0` indicates no offset. Positive row and column values indicate '
'offsets down and to the right, while negative row and column values indicate '
'offsets up and to the left.') # type: int
SBND1 = _IntegerDescriptor(
'SBND1', True, 10, default_value=0,
docstring='First Graphic Bound Location. This field shall contain an ordered pair of '
'integers defining a location in Cartesian coordinates for use with CGM graphics. It is '
'the upper left corner of the bounding box for the CGM graphic.') # type: int
SCOLOR = _StringEnumDescriptor(
'SCOLOR', True, 1, {'C', 'M'}, default_value='M',
docstring='Graphic Color. If `SFMT = C`, this field shall contain a :code:`C` if the CGM contains any '
'color pieces or an :code:`M` if it is monochrome (i.e., black, '
'white, or levels of grey).') # type: str
SBND2 = _IntegerDescriptor(
'SBND2', True, 10, default_value=0,
docstring='Second Graphic Bound Location. This field shall contain an ordered pair of '
'integers defining a location in Cartesian coordinates for use with CGM graphics. '
'It is the lower right corner of the bounding box for the CGM graphic.') # type: int
SRES2 = _IntegerDescriptor(
'SRES2', True, 2, default_value=0,
docstring='Reserved for Future Use.') # type: int
UserHeader = _NITFElementDescriptor(
'UserHeader', True, UserHeaderType, default_args={},
docstring='User defined header.') # type: UserHeaderType
| GraphicsSegmentHeader |
proxy_suite_test.go | // Copyright 2021 The Operator-SDK Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package proxy_test
import (
"testing"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
)
func | (t *testing.T) {
RegisterFailHandler(Fail)
RunSpecs(t, "Proxy Suite")
}
| TestProxy |
nDCWorkflowResetter.go | // The MIT License
//
// Copyright (c) 2020 Temporal Technologies Inc. All rights reserved.
//
// Copyright (c) 2020 Uber Technologies, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
//go:generate mockgen -copyright_file ../../LICENSE -package $GOPACKAGE -source $GOFILE -destination nDCWorkflowResetter_mock.go
package history
import (
"context"
"time"
"github.com/pborman/uuid"
"github.com/temporalio/temporal/common"
"github.com/temporalio/temporal/common/convert"
"github.com/temporalio/temporal/common/definition"
"github.com/temporalio/temporal/common/log"
"github.com/temporalio/temporal/common/persistence"
)
const (
resendOnResetWorkflowMessage = "Resend events due to reset workflow"
)
type (
nDCWorkflowResetter interface {
resetWorkflow(
ctx context.Context,
now time.Time,
baseLastEventID int64,
baseLastEventVersion int64,
incomingFirstEventID int64,
incomingFirstEventVersion int64,
) (mutableState, error)
}
| nDCWorkflowResetterImpl struct {
shard ShardContext
transactionMgr nDCTransactionMgr
historyV2Mgr persistence.HistoryManager
stateRebuilder nDCStateRebuilder
namespaceID string
workflowID string
baseRunID string
newContext workflowExecutionContext
newRunID string
logger log.Logger
}
)
var _ nDCWorkflowResetter = (*nDCWorkflowResetterImpl)(nil)
func newNDCWorkflowResetter(
shard ShardContext,
transactionMgr nDCTransactionMgr,
namespaceID string,
workflowID string,
baseRunID string,
newContext workflowExecutionContext,
newRunID string,
logger log.Logger,
) *nDCWorkflowResetterImpl {
return &nDCWorkflowResetterImpl{
shard: shard,
transactionMgr: transactionMgr,
historyV2Mgr: shard.GetHistoryManager(),
stateRebuilder: newNDCStateRebuilder(shard, logger),
namespaceID: namespaceID,
workflowID: workflowID,
baseRunID: baseRunID,
newContext: newContext,
newRunID: newRunID,
logger: logger,
}
}
func (r *nDCWorkflowResetterImpl) resetWorkflow(
ctx context.Context,
now time.Time,
baseLastEventID int64,
baseLastEventVersion int64,
incomingFirstEventID int64,
incomingFirstEventVersion int64,
) (mutableState, error) {
baseBranchToken, err := r.getBaseBranchToken(
ctx,
baseLastEventID,
baseLastEventVersion,
incomingFirstEventID,
incomingFirstEventVersion,
)
if err != nil {
return nil, err
}
	resetBranchToken, err := r.getResetBranchToken(ctx, baseBranchToken, baseLastEventID)
	if err != nil {
		return nil, err
	}
	requestID := uuid.New()
rebuildMutableState, rebuiltHistorySize, err := r.stateRebuilder.rebuild(
ctx,
now,
definition.NewWorkflowIdentifier(
r.namespaceID,
r.workflowID,
r.baseRunID,
),
baseBranchToken,
baseLastEventID,
baseLastEventVersion,
definition.NewWorkflowIdentifier(
r.namespaceID,
r.workflowID,
r.newRunID,
),
resetBranchToken,
requestID,
)
if err != nil {
return nil, err
}
r.newContext.clear()
r.newContext.setHistorySize(rebuiltHistorySize)
return rebuildMutableState, nil
}
func (r *nDCWorkflowResetterImpl) getBaseBranchToken(
ctx context.Context,
baseLastEventID int64,
baseLastEventVersion int64,
incomingFirstEventID int64,
incomingFirstEventVersion int64,
) (baseBranchToken []byte, retError error) {
baseWorkflow, err := r.transactionMgr.loadNDCWorkflow(
ctx,
r.namespaceID,
r.workflowID,
r.baseRunID,
)
if err != nil {
return nil, err
}
defer func() {
baseWorkflow.getReleaseFn()(retError)
}()
baseVersionHistories := baseWorkflow.getMutableState().GetVersionHistories()
index, err := baseVersionHistories.FindFirstVersionHistoryIndexByItem(
persistence.NewVersionHistoryItem(baseLastEventID, baseLastEventVersion),
)
if err != nil {
// the base event and incoming event are from different branch
// only re-replicate the gap on the incoming branch
		// the base branch events will eventually arrive
return nil, newNDCRetryTaskErrorWithHint(
resendOnResetWorkflowMessage,
r.namespaceID,
r.workflowID,
r.newRunID,
common.EmptyEventID,
common.EmptyVersion,
incomingFirstEventID,
incomingFirstEventVersion,
)
}
baseVersionHistory, err := baseVersionHistories.GetVersionHistory(index)
if err != nil {
return nil, err
}
return baseVersionHistory.GetBranchToken(), nil
}
func (r *nDCWorkflowResetterImpl) getResetBranchToken(
ctx context.Context,
baseBranchToken []byte,
baseLastEventID int64,
) ([]byte, error) {
// fork a new history branch
shardID := r.shard.GetShardID()
resp, err := r.historyV2Mgr.ForkHistoryBranch(&persistence.ForkHistoryBranchRequest{
ForkBranchToken: baseBranchToken,
ForkNodeID: baseLastEventID + 1,
Info: persistence.BuildHistoryGarbageCleanupInfo(r.namespaceID, r.workflowID, r.newRunID),
ShardID: convert.IntPtr(shardID),
})
if err != nil {
return nil, err
}
return resp.NewBranchToken, nil
} | |
jump.go | package jump
import (
"math"
)
func Jump(nums []int) int {
n := len(nums)
if n <= 1 {
return 0
}
dp := make([]int, n)
for i := range dp {
dp[i] = math.MaxInt32
}
dp[n-1], dp[n-2] = 0, 1
for i := n - 3; i >= 0; i-- {
if nums[i] >= n-i-1 {
dp[i] = 1
} else {
for j := 1; j <= nums[i]; j++ {
if 1+dp[i+j] < dp[i] {
dp[i] = 1 + dp[i+j]
}
}
}
}
return dp[0]
}
func Jump2(nums []int) int {
n := len(nums)
if n <= 1 {
return 0
}
// assume we can always jump to the last index
var start, end, step, maxend int
for end < n-1 {
step++
maxend = end + 1
for i := start; i <= end; i++ {
if i+nums[i] >= n-1 {
return step
}
if i+nums[i] > maxend {
maxend = i + nums[i]
}
}
start = end + 1
end = maxend
}
return step
}
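// Illustrative check (not part of the original file): for nums = []int{2, 3, 1, 1, 4}
// the minimum number of jumps to the last index is 2 (index 0 -> 1 -> 4), and the
// dynamic-programming and greedy variants in this file are expected to agree:
//
//	fmt.Println(Jump([]int{2, 3, 1, 1, 4}))  // 2
//	fmt.Println(Jump2([]int{2, 3, 1, 1, 4})) // 2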
func | (nums []int) int {
i, step, end, maxend, n := 0, 0, 0, 0, len(nums)
for end < n-1 {
step++
for ; i <= end; i++ {
if i+nums[i] > maxend {
maxend = i + nums[i]
}
if maxend >= n-1 {
return step
}
}
if end == maxend {
// can not reach the end
break
}
end = maxend
}
if n == 1 {
return 0
}
return -1
}
| Jump3 |
jit.rs | use std::collections::HashMap;
use cranelift::prelude::*;
use cranelift_module::{DataContext, Linkage, Module};
use cranelift_simplejit::{SimpleJITBackend, SimpleJITBuilder};
use frontend::*;
use std::ffi::CString;
use std::io::{stdout, Write};
use std::slice;
/// The basic JIT class.
pub struct JIT {
/// The function builder context, which is reused across multiple
/// FunctionBuilder instances.
builder_context: FunctionBuilderContext,
/// The main Cranelift context, which holds the state for codegen. Cranelift
/// separates this from `Module` to allow for parallel compilation, with a
/// context per thread, though this isn't in the simple demo here.
ctx: codegen::Context,
/// The data context, which is to data objects what `ctx` is to functions.
data_ctx: DataContext,
/// The module, with the simplejit backend, which manages the JIT'd
/// functions.
module: Module<SimpleJITBackend>,
}
fn print_num(n: i64) -> i64 {
let mut stdout = stdout();
let mut had_error = writeln!(stdout, "{}", n).is_err();
had_error = stdout.lock().flush().is_err() || had_error;
had_error as i64
}
impl JIT {
/// Create a new `JIT` instance.
pub fn new() -> Self {
let mut builder = SimpleJITBuilder::new(cranelift_module::default_libcall_names());
let addr = print_num as *const u8;
builder.symbol("print_num", addr);
let mut module = Module::new(builder);
let mut sig = module.make_signature();
sig.params.push(AbiParam::new(types::I64));
sig.returns.push(AbiParam::new(types::I64));
module
.declare_function("print_num", Linkage::Import, &sig)
.unwrap();
Self {
builder_context: FunctionBuilderContext::new(),
ctx: module.make_context(),
data_ctx: DataContext::new(),
module,
}
}
/// Compile a string in the toy language into machine code.
pub fn compile(&mut self, input: &str) -> Result<*const u8, String> {
let module = parse(&input)?;
for (i, s) in module.strings.iter().enumerate() {
self.create_data(
&format!("//s{}//", i),
CString::new(s.as_str()).unwrap().into_bytes(),
)?;
}
let mut main = None;
for func in module.functions {
if func.name == "main" {
main = Some(self.compile_func(func)?)
} else {
self.compile_func(func)?;
}
}
match main {
Some(ptr) => Ok(ptr),
None => Err("no main function in module".to_owned()),
}
}
/// Compile a string in the toy language into machine code.
fn compile_func(&mut self, func: Function) -> Result<*const u8, String> {
// First, parse the string, producing AST nodes.
let Function {
name,
params,
the_return,
stmts,
} = func;
// Then, translate the AST nodes into Cranelift IR.
self.translate(params, the_return, stmts)?;
// Next, declare the function to simplejit. Functions must be declared
// before they can be called, or defined.
//
// TODO: This may be an area where the API should be streamlined; should
// we have a version of `declare_function` that automatically declares
// the function?
let id = self
.module
.declare_function(&name, Linkage::Export, &self.ctx.func.signature)
.map_err(|e| e.to_string())?;
// Define the function to simplejit. This finishes compilation, although
// there may be outstanding relocations to perform. Currently, simplejit
// cannot finish relocations until all functions to be called are
// defined. For this toy demo for now, we'll just finalize the function
// below.
self.module
.define_function(id, &mut self.ctx, &mut codegen::binemit::NullTrapSink {})
.map_err(|e| e.to_string())?;
// Now that compilation is finished, we can clear out the context state.
self.module.clear_context(&mut self.ctx);
// Finalize the functions which we just defined, which resolves any
// outstanding relocations (patching in addresses, now that they're
// available).
self.module.finalize_definitions();
// We can now retrieve a pointer to the machine code.
let code = self.module.get_finalized_function(id);
Ok(code)
}
/// Create a zero-initialized data section.
pub fn create_data(&mut self, name: &str, contents: Vec<u8>) -> Result<&[u8], String> {
// The steps here are analogous to `compile`, except that data is much
// simpler than functions.
self.data_ctx.define(contents.into_boxed_slice());
let id = self
.module
.declare_data(name, Linkage::Export, true, false, None)
.map_err(|e| e.to_string())?;
self.module
.define_data(id, &self.data_ctx)
.map_err(|e| e.to_string())?;
self.data_ctx.clear();
self.module.finalize_definitions();
let buffer = self.module.get_finalized_data(id);
// TODO: Can we move the unsafe into cranelift?
Ok(unsafe { slice::from_raw_parts(buffer.0, buffer.1) })
}
// Translate from toy-language AST nodes into Cranelift IR.
fn translate(
&mut self,
params: Vec<String>,
the_return: String,
stmts: Vec<Expr>,
) -> Result<(), String> {
// Our toy language currently only supports I64 values, though Cranelift
// supports other types.
let int = self.module.target_config().pointer_type();
for _p in ¶ms {
self.ctx.func.signature.params.push(AbiParam::new(int));
}
// Our toy language currently only supports one return value, though
// Cranelift is designed to support more.
self.ctx.func.signature.returns.push(AbiParam::new(int));
// Create the builder to build a function.
let mut builder = FunctionBuilder::new(&mut self.ctx.func, &mut self.builder_context);
// Create the entry block, to start emitting code in.
let entry_block = builder.create_block();
// Since this is the entry block, add block parameters corresponding to
// the function's parameters.
//
// TODO: Streamline the API here.
builder.append_block_params_for_function_params(entry_block);
// Tell the builder to emit code in this block.
builder.switch_to_block(entry_block);
// And, tell the builder that this block will have no further
// predecessors. Since it's the entry block, it won't have any
// predecessors.
builder.seal_block(entry_block);
// The toy language allows variables to be declared implicitly.
// Walk the AST and declare all implicitly-declared variables.
let variables =
declare_variables(int, &mut builder, ¶ms, &the_return, &stmts, entry_block);
// Now translate the statements of the function body.
let mut trans = FunctionTranslator {
int,
builder,
variables,
module: &mut self.module,
};
for expr in stmts {
trans.translate_expr(expr);
}
// Set up the return variable of the function. Above, we declared a
// variable to hold the return value. Here, we just do a use of that
// variable.
let return_variable = trans.variables.get(&the_return).unwrap();
let return_value = trans.builder.use_var(*return_variable);
// Emit the return instruction.
trans.builder.ins().return_(&[return_value]);
// Tell the builder we're done with this function.
trans.builder.finalize();
Ok(())
}
}
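// Usage sketch (an assumption, not taken from this file; the toy-language syntax shown
// is only approximate):
//
//     let mut jit = JIT::new();
//     let code = jit.compile("fn main() -> (r) { r = 40 + 2 }")?;
//     let main = unsafe { std::mem::transmute::<_, fn() -> isize>(code) };
//     assert_eq!(main(), 42);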
/// A collection of state used for translating from toy-language AST nodes
/// into Cranelift IR.
struct FunctionTranslator<'a> {
int: types::Type,
builder: FunctionBuilder<'a>,
variables: HashMap<String, Variable>,
module: &'a mut Module<SimpleJITBackend>,
}
impl<'a> FunctionTranslator<'a> {
/// When you write out instructions in Cranelift, you get back `Value`s. You
/// can then use these references in other instructions.
fn translate_expr(&mut self, expr: Expr) -> Value {
match expr {
Expr::Number(literal) => {
let imm: i32 = literal.parse().unwrap();
self.builder.ins().iconst(self.int, i64::from(imm))
}
Expr::String(idx) => {
let name = format!("//s{}//", idx);
self.translate_global_data_addr(name)
}
Expr::Add(lhs, rhs) => {
let lhs = self.translate_expr(*lhs);
let rhs = self.translate_expr(*rhs);
self.builder.ins().iadd(lhs, rhs)
}
Expr::Sub(lhs, rhs) => {
let lhs = self.translate_expr(*lhs);
let rhs = self.translate_expr(*rhs);
self.builder.ins().isub(lhs, rhs)
}
Expr::Mul(lhs, rhs) => {
let lhs = self.translate_expr(*lhs);
let rhs = self.translate_expr(*rhs);
self.builder.ins().imul(lhs, rhs)
}
Expr::Div(lhs, rhs) => {
let lhs = self.translate_expr(*lhs);
let rhs = self.translate_expr(*rhs);
self.builder.ins().udiv(lhs, rhs)
}
Expr::Eq(lhs, rhs) => {
let lhs = self.translate_expr(*lhs);
let rhs = self.translate_expr(*rhs);
let c = self.builder.ins().icmp(IntCC::Equal, lhs, rhs);
self.builder.ins().bint(self.int, c)
}
Expr::Ne(lhs, rhs) => { | let lhs = self.translate_expr(*lhs);
let rhs = self.translate_expr(*rhs);
let c = self.builder.ins().icmp(IntCC::NotEqual, lhs, rhs);
self.builder.ins().bint(self.int, c)
}
Expr::Lt(lhs, rhs) => {
let lhs = self.translate_expr(*lhs);
let rhs = self.translate_expr(*rhs);
let c = self.builder.ins().icmp(IntCC::SignedLessThan, lhs, rhs);
self.builder.ins().bint(self.int, c)
}
Expr::Le(lhs, rhs) => {
let lhs = self.translate_expr(*lhs);
let rhs = self.translate_expr(*rhs);
let c = self
.builder
.ins()
.icmp(IntCC::SignedLessThanOrEqual, lhs, rhs);
self.builder.ins().bint(self.int, c)
}
Expr::Gt(lhs, rhs) => {
let lhs = self.translate_expr(*lhs);
let rhs = self.translate_expr(*rhs);
let c = self.builder.ins().icmp(IntCC::SignedGreaterThan, lhs, rhs);
self.builder.ins().bint(self.int, c)
}
Expr::Ge(lhs, rhs) => {
let lhs = self.translate_expr(*lhs);
let rhs = self.translate_expr(*rhs);
let c = self
.builder
.ins()
.icmp(IntCC::SignedGreaterThanOrEqual, lhs, rhs);
self.builder.ins().bint(self.int, c)
}
Expr::Call(name, args) => self.translate_call(name, args),
Expr::GlobalDataAddr(name) => self.translate_global_data_addr(name),
Expr::Identifier(name) => {
// `use_var` is used to read the value of a variable.
let variable = self.variables.get(&name).expect("variable not defined");
self.builder.use_var(*variable)
}
Expr::Assign(name, expr) => {
// `def_var` is used to write the value of a variable. Note that
// variables can have multiple definitions. Cranelift will
// convert them into SSA form for itself automatically.
let new_value = self.translate_expr(*expr);
let variable = self.variables.get(&name).unwrap();
self.builder.def_var(*variable, new_value);
new_value
}
Expr::IfElse(condition, then_body, else_body) => {
let condition_value = self.translate_expr(*condition);
let then_block = self.builder.create_block();
let else_block = self.builder.create_block();
let merge_block = self.builder.create_block();
// If-else constructs in the toy language have a return value.
// In traditional SSA form, this would produce a PHI between
// the then and else bodies. Cranelift uses block parameters,
// so set up a parameter in the merge block, and we'll pass
// the return values to it from the branches.
self.builder.append_block_param(merge_block, self.int);
// Test the if condition and conditionally branch.
self.builder.ins().brz(condition_value, else_block, &[]);
// Fall through to then block.
self.builder.ins().jump(then_block, &[]);
self.builder.switch_to_block(then_block);
self.builder.seal_block(then_block);
let mut then_return = self.builder.ins().iconst(self.int, 0);
for expr in then_body {
then_return = self.translate_expr(expr);
}
// Jump to the merge block, passing it the block return value.
self.builder.ins().jump(merge_block, &[then_return]);
self.builder.switch_to_block(else_block);
self.builder.seal_block(else_block);
let mut else_return = self.builder.ins().iconst(self.int, 0);
for expr in else_body {
else_return = self.translate_expr(expr);
}
// Jump to the merge block, passing it the block return value.
self.builder.ins().jump(merge_block, &[else_return]);
// Switch to the merge block for subsequent statements.
self.builder.switch_to_block(merge_block);
// We've now seen all the predecessors of the merge block.
self.builder.seal_block(merge_block);
// Read the value of the if-else by reading the merge block
// parameter.
let phi = self.builder.block_params(merge_block)[0];
phi
}
Expr::WhileLoop(condition, loop_body) => {
let header_block = self.builder.create_block();
let body_block = self.builder.create_block();
let exit_block = self.builder.create_block();
self.builder.ins().jump(header_block, &[]);
self.builder.switch_to_block(header_block);
let condition_value = self.translate_expr(*condition);
self.builder.ins().brz(condition_value, exit_block, &[]);
self.builder.ins().jump(body_block, &[]);
self.builder.switch_to_block(body_block);
self.builder.seal_block(body_block);
for expr in loop_body {
self.translate_expr(expr);
}
self.builder.ins().jump(header_block, &[]);
self.builder.switch_to_block(exit_block);
                // We've reached the bottom of the loop, so there will be no
                // more backedges to the header or exits to the bottom.
self.builder.seal_block(header_block);
self.builder.seal_block(exit_block);
// Just return 0 for now.
self.builder.ins().iconst(self.int, 0)
}
}
}
fn translate_call(&mut self, name: String, args: Vec<Expr>) -> Value {
let mut sig = self.module.make_signature();
// Add a parameter for each argument.
for _arg in &args {
sig.params.push(AbiParam::new(self.int));
}
// For simplicity for now, just make all calls return a single I64.
sig.returns.push(AbiParam::new(self.int));
// TODO: Streamline the API here?
let callee = self
.module
.declare_function(&name, Linkage::Import, &sig)
.expect("problem declaring function");
let local_callee = self
.module
.declare_func_in_func(callee, &mut self.builder.func);
let mut arg_values = Vec::new();
for arg in args {
arg_values.push(self.translate_expr(arg))
}
let call = self.builder.ins().call(local_callee, &arg_values);
self.builder.inst_results(call)[0]
}
fn translate_global_data_addr(&mut self, name: String) -> Value {
let sym = self
.module
.declare_data(&name, Linkage::Export, true, false, None)
.expect("problem declaring data object");
let local_id = self
.module
.declare_data_in_func(sym, &mut self.builder.func);
let pointer = self.module.target_config().pointer_type();
self.builder.ins().symbol_value(pointer, local_id)
}
}
fn declare_variables(
int: types::Type,
builder: &mut FunctionBuilder,
params: &[String],
the_return: &str,
stmts: &[Expr],
entry_block: Block,
) -> HashMap<String, Variable> {
let mut variables = HashMap::new();
let mut index = 0;
for (i, name) in params.iter().enumerate() {
// TODO: cranelift_frontend should really have an API to make it easy to set
// up param variables.
let val = builder.block_params(entry_block)[i];
let var = declare_variable(int, builder, &mut variables, &mut index, name);
builder.def_var(var, val);
}
let zero = builder.ins().iconst(int, 0);
let return_variable = declare_variable(int, builder, &mut variables, &mut index, the_return);
builder.def_var(return_variable, zero);
for expr in stmts {
declare_variables_in_stmt(int, builder, &mut variables, &mut index, expr);
}
variables
}
/// Recursively descend through the AST, translating all implicit
/// variable declarations.
fn declare_variables_in_stmt(
int: types::Type,
builder: &mut FunctionBuilder,
variables: &mut HashMap<String, Variable>,
index: &mut usize,
expr: &Expr,
) {
match *expr {
Expr::Assign(ref name, _) => {
declare_variable(int, builder, variables, index, name);
}
Expr::IfElse(ref _condition, ref then_body, ref else_body) => {
for stmt in then_body {
declare_variables_in_stmt(int, builder, variables, index, &stmt);
}
for stmt in else_body {
declare_variables_in_stmt(int, builder, variables, index, &stmt);
}
}
Expr::WhileLoop(ref _condition, ref loop_body) => {
for stmt in loop_body {
declare_variables_in_stmt(int, builder, variables, index, &stmt);
}
}
_ => (),
}
}
/// Declare a single variable declaration.
fn declare_variable(
int: types::Type,
builder: &mut FunctionBuilder,
variables: &mut HashMap<String, Variable>,
index: &mut usize,
name: &str,
) -> Variable {
let var = Variable::new(*index);
if !variables.contains_key(name) {
variables.insert(name.into(), var);
builder.declare_var(var, int);
*index += 1;
}
var
} | |
EC2-VPCEndpointService.go | package resources
// Code generated by go generate; DO NOT EDIT.
// It's generated by "github.com/KablamoOSS/kombustion/generate"
import (
"fmt"
"github.com/KablamoOSS/kombustion/types"
yaml "github.com/KablamoOSS/yaml"
)
// EC2VPCEndpointService Documentation: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ec2-vpcendpointservice.html
type EC2VPCEndpointService struct {
Type string `yaml:"Type"`
Properties EC2VPCEndpointServiceProperties `yaml:"Properties"`
Condition interface{} `yaml:"Condition,omitempty"`
Metadata interface{} `yaml:"Metadata,omitempty"`
DependsOn interface{} `yaml:"DependsOn,omitempty"`
}
// EC2VPCEndpointService Properties
type EC2VPCEndpointServiceProperties struct {
AcceptanceRequired interface{} `yaml:"AcceptanceRequired,omitempty"`
NetworkLoadBalancerArns interface{} `yaml:"NetworkLoadBalancerArns"`
}
// NewEC2VPCEndpointService constructor creates a new EC2VPCEndpointService
func NewEC2VPCEndpointService(properties EC2VPCEndpointServiceProperties, deps ...interface{}) EC2VPCEndpointService {
return EC2VPCEndpointService{
Type: "AWS::EC2::VPCEndpointService",
Properties: properties,
DependsOn: deps,
}
}
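// Illustrative usage (assumption, not from this file): constructing the resource
// directly in Go; the load balancer ARN is a placeholder.
//
//	svc := NewEC2VPCEndpointService(EC2VPCEndpointServiceProperties{
//		AcceptanceRequired:      true,
//		NetworkLoadBalancerArns: []string{"arn:aws:elasticloadbalancing:..."},
//	})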
// ParseEC2VPCEndpointService parses EC2VPCEndpointService
func ParseEC2VPCEndpointService(
name string,
data string,
) (
source string,
conditions types.TemplateObject,
metadata types.TemplateObject,
mappings types.TemplateObject,
outputs types.TemplateObject,
parameters types.TemplateObject,
resources types.TemplateObject,
transform types.TemplateObject,
errors []error,
) {
source = "kombustion-core-resources"
var resource EC2VPCEndpointService
err := yaml.Unmarshal([]byte(data), &resource)
if err != nil {
errors = append(errors, err)
return
}
	if validateErrs := resource.Properties.Validate(); len(validateErrs) > 0 {
errors = append(errors, validateErrs...)
return
} | return
}
// ParseEC2VPCEndpointService validator
func (resource EC2VPCEndpointService) Validate() []error {
return resource.Properties.Validate()
}
// ParseEC2VPCEndpointServiceProperties validator
func (resource EC2VPCEndpointServiceProperties) Validate() []error {
errors := []error{}
if resource.NetworkLoadBalancerArns == nil {
errors = append(errors, fmt.Errorf("Missing required field 'NetworkLoadBalancerArns'"))
}
return errors
} |
resources = types.TemplateObject{name: resource}
|
error.rs | use {
crate::{
instruction::InstructionError, message::SanitizeMessageError, sanitize::SanitizeError,
},
serde::Serialize,
thiserror::Error,
};
/// Reasons a transaction might be rejected.
#[derive(
Error, Serialize, Deserialize, Debug, PartialEq, Eq, Clone, AbiExample, AbiEnumVisitor,
)]
pub enum TransactionError {
/// An account is already being processed in another transaction in a way
/// that does not support parallelism
#[error("Account in use")]
AccountInUse,
/// A `Pubkey` appears twice in the transaction's `account_keys`. Instructions can reference
/// `Pubkey`s more than once but the message must contain a list with no duplicate keys
#[error("Account loaded twice")]
AccountLoadedTwice,
/// Attempt to debit an account but found no record of a prior credit.
#[error("Attempt to debit an account but found no record of a prior credit.")]
AccountNotFound,
/// Attempt to load a program that does not exist
#[error("Attempt to load a program that does not exist")]
ProgramAccountNotFound,
/// The from `Pubkey` does not have sufficient balance to pay the fee to schedule the transaction
#[error("Insufficient funds for fee")]
InsufficientFundsForFee,
/// This account may not be used to pay transaction fees
#[error("This account may not be used to pay transaction fees")]
InvalidAccountForFee,
/// The bank has seen this transaction before. This can occur under normal operation
/// when a UDP packet is duplicated, as a user error from a client not updating
/// its `recent_blockhash`, or as a double-spend attack.
#[error("This transaction has already been processed")]
AlreadyProcessed,
/// The bank has not seen the given `recent_blockhash` or the transaction is too old and
/// the `recent_blockhash` has been discarded.
#[error("Blockhash not found")]
BlockhashNotFound,
/// An error occurred while processing an instruction. The first element of the tuple
/// indicates the instruction index in which the error occurred.
#[error("Error processing Instruction {0}: {1}")]
InstructionError(u8, InstructionError),
/// Loader call chain is too deep
#[error("Loader call chain is too deep")]
CallChainTooDeep,
/// Transaction requires a fee but has no signature present
#[error("Transaction requires a fee but has no signature present")]
MissingSignatureForFee,
/// Transaction contains an invalid account reference
#[error("Transaction contains an invalid account reference")]
InvalidAccountIndex,
/// Transaction did not pass signature verification
#[error("Transaction did not pass signature verification")]
SignatureFailure,
/// This program may not be used for executing instructions
#[error("This program may not be used for executing instructions")]
InvalidProgramForExecution,
/// Transaction failed to sanitize accounts offsets correctly
/// implies that account locks are not taken for this TX, and should
/// not be unlocked.
#[error("Transaction failed to sanitize accounts offsets correctly")]
SanitizeFailure,
#[error("Transactions are currently disabled due to cluster maintenance")]
ClusterMaintenance,
/// Transaction processing left an account with an outstanding borrowed reference
#[error("Transaction processing left an account with an outstanding borrowed reference")]
AccountBorrowOutstanding,
/// Transaction would exceed max Block Cost Limit
#[error("Transaction would exceed max Block Cost Limit")]
WouldExceedMaxBlockCostLimit,
/// Transaction version is unsupported
#[error("Transaction version is unsupported")]
UnsupportedVersion,
/// Transaction loads a writable account that cannot be written
#[error("Transaction loads a writable account that cannot be written")]
InvalidWritableAccount,
/// Transaction would exceed max account limit within the block
#[error("Transaction would exceed max account limit within the block")]
WouldExceedMaxAccountCostLimit,
/// Transaction would exceed account data limit within the block
#[error("Transaction would exceed account data limit within the block")]
WouldExceedAccountDataBlockLimit,
/// Transaction locked too many accounts
#[error("Transaction locked too many accounts")]
TooManyAccountLocks,
/// Address lookup table not found
#[error("Transaction loads an address table account that doesn't exist")]
AddressLookupTableNotFound,
/// Attempted to lookup addresses from an account owned by the wrong program
#[error("Transaction loads an address table account with an invalid owner")]
InvalidAddressLookupTableOwner,
/// Attempted to lookup addresses from an invalid account
#[error("Transaction loads an address table account with invalid data")]
InvalidAddressLookupTableData,
/// Address table lookup uses an invalid index
#[error("Transaction address table lookup uses an invalid index")]
InvalidAddressLookupTableIndex,
/// Transaction leaves an account with a lower balance than rent-exempt minimum
#[error(
"Transaction leaves an account with data with a lower balance than rent-exempt minimum"
)]
InvalidRentPayingAccount,
/// Transaction would exceed max Vote Cost Limit
#[error("Transaction would exceed max Vote Cost Limit")]
WouldExceedMaxVoteCostLimit,
/// Transaction would exceed total account data limit
#[error("Transaction would exceed total account data limit")]
WouldExceedAccountDataTotalLimit,
}
impl From<SanitizeError> for TransactionError {
fn from(_: SanitizeError) -> Self {
Self::SanitizeFailure
}
}
impl From<SanitizeMessageError> for TransactionError {
fn from(_err: SanitizeMessageError) -> Self {
Self::SanitizeFailure
}
}
#[derive(Debug, Error, PartialEq, Eq, Clone)]
pub enum AddressLookupError {
/// Attempted to lookup addresses from a table that does not exist
#[error("Attempted to lookup addresses from a table that does not exist")]
LookupTableAccountNotFound,
/// Attempted to lookup addresses from an account owned by the wrong program
#[error("Attempted to lookup addresses from an account owned by the wrong program")]
InvalidAccountOwner,
/// Attempted to lookup addresses from an invalid account
#[error("Attempted to lookup addresses from an invalid account")]
InvalidAccountData,
/// Address lookup contains an invalid index
#[error("Address lookup contains an invalid index")]
InvalidLookupIndex,
}
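// Illustration (not part of the original file): the `From` impl below lets `?` propagate
// lookup failures as transaction errors, e.g.
//
//     let err: TransactionError = AddressLookupError::InvalidLookupIndex.into();
//     assert_eq!(err, TransactionError::InvalidAddressLookupTableIndex);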
impl From<AddressLookupError> for TransactionError {
fn from(err: AddressLookupError) -> Self |
}
| {
match err {
AddressLookupError::LookupTableAccountNotFound => Self::AddressLookupTableNotFound,
AddressLookupError::InvalidAccountOwner => Self::InvalidAddressLookupTableOwner,
AddressLookupError::InvalidAccountData => Self::InvalidAddressLookupTableData,
AddressLookupError::InvalidLookupIndex => Self::InvalidAddressLookupTableIndex,
}
} |
solution.py | import json
import uuid
from collections import OrderedDict
from ... import path
from ...iterutils import first
__all__ = ['SlnBuilder', 'SlnElement', 'SlnVariable', 'Solution', 'UuidMap']
class SlnElement:
def __init__(self, name, arg=None, value=None):
if (arg is None) != (value is None):
raise TypeError('if arg is passed, value must be too')
self.name = name
self.arg = arg
self.value = value
self.children = []
def __call__(self, *args):
return self.extend(args)
def append(self, item):
self.children.append(item)
return self
def extend(self, args):
self.children.extend(args)
return self
def write(self, out, depth=0):
out.write('\t' * depth)
out.write(self.name)
if self.arg:
out.write('({}) = {}'.format(self.arg, self.value))
out.write('\n')
for i in self.children:
i.write(out, depth + 1)
out.write('\t' * depth + 'End' + self.name + '\n')
class SlnVariable:
def __init__(self, name, value):
self.name = name
self.value = value
def write(self, out, depth=0):
out.write('\t' * depth + '{} = {}\n'.format(self.name, self.value))
class SlnBuilder:
def __init__(self):
pass
def __call__(self, *args, **kwargs):
return SlnElement(*args, **kwargs)
def __getattribute__(self, name):
def closure(*args, **kwargs):
return SlnElement(name, *args, **kwargs)
return closure
class Solution:
def __init__(self, uuids):
self.uuid = uuids['']
self._uuids = uuids
self._projects = OrderedDict()
def __setitem__(self, key, value):
value.set_uuid(self._uuids)
self._projects[key] = value
def __getitem__(self, key):
return self._projects[key]
def set_default(self, key):
if key not in self._projects:
return
        new_projects = OrderedDict([(key, self._projects.pop(key))])
new_projects.update(self._projects)
self._projects = new_projects
def __iter__(self):
return iter(self._projects.values())
def __contains__(self, key):
return key in self._projects
def dependencies(self, deps):
# By definition, a dependency for an edge must already be defined by
# the time the edge is created, so we can map *all* the dependencies to
# their associated projects by looking at the projects we've already
# created.
dependencies = []
for dep in deps:
if not dep.creator:
continue
dep_output = first(dep.creator.public_output)
if dep_output not in self:
raise RuntimeError('unknown dependency for {!r}'.format(dep))
dependencies.append(self[dep_output])
return dependencies
@property
def uuid_str(self):
return uuid_str(self.uuid)
def write(self, out):
S = SlnBuilder()
Var = SlnVariable
out.write('Microsoft Visual Studio Solution File, Format Version ' +
'12.00\n')
out.write('# Visual Studio 14\n')
Var('VisualStudioVersion', '14.0.22609.0').write(out)
Var('MinimumVisualStudioVersion', '10.0.40219.1').write(out)
configs = set()
project_info = []
for p in self:
path_vars = {path.Root.builddir: None}
proj = S.Project(
'"{}"'.format(self.uuid_str),
'"{name}", "{path}", "{uuid}"'.format(
name=p.name, path=p.path.string(path_vars), uuid=p.uuid_str
)
)
if p.dependencies:
proj.append(
S.ProjectSection('ProjectDependencies', 'postProject')
.extend(Var(i.uuid_str, i.uuid_str)
for i in p.dependencies)
)
proj.write(out)
configs.add(p.config_plat)
project_info.append(Var('{uuid}.{cfg}.ActiveCfg'.format(
uuid=p.uuid_str, cfg=p.config_plat
), p.real_config_plat))
project_info.append(Var('{uuid}.{cfg}.Build.0'.format(
uuid=p.uuid_str, cfg=p.config_plat
), p.real_config_plat))
S.Global()(
S.GlobalSection('SolutionConfigurationPlatforms', 'preSolution')
.extend(Var(i, i) for i in configs),
S.GlobalSection('ProjectConfigurationPlatforms', 'postSolution')
.extend(project_info),
S.GlobalSection('SolutionProperties', 'preSolution')(
Var('HideSolutionNode', 'FALSE')
)
).write(out)
def uuid_str(uuid):
return '{{{}}}'.format(str(uuid).upper())
class UuidMap:
version = 1
def __init__(self, path):
self._path = path
self._seen = set()
try:
self._map = self._load(path)
except IOError:
self._map = {}
def | (self, key):
self._seen.add(key)
if key in self._map:
return self._map[key]
else:
u = uuid.uuid4()
self._map[key] = u
return u
@classmethod
def _load(cls, path):
with open(path) as inp:
state = json.load(inp)
if state['version'] > cls.version:
raise ValueError('saved version exceeds expected version')
return { k: uuid.UUID(hex=v) for k, v in state['map'].items() }
def save(self, path=None):
with open(path or self._path, 'w') as out:
# Only save the UUIDs we saw this time. Skip ones we didn't see.
seenmap = { k: v.hex for k, v in self._map.items()
if k in self._seen }
json.dump({
'version': self.version,
'map': seenmap,
}, out)
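# Usage sketch (illustrative only; the cache file name is hypothetical). UUIDs stay
# stable across builds because they are persisted to disk:
#
#   uuids = UuidMap('.uuid_cache.json')
#   sln = Solution(uuids)
#   ...  # populate sln and write it out
#   uuids.save()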
| __getitem__ |
3-bad-lsp.py | class Animal(object):
pass
class Duck(Animal):
pass
class Snake(Animal):
pass
class Platypus(Animal):
pass
def can_quack(animal):
|
if __name__ == '__main__':
print(can_quack(Duck()))
print(can_quack(Snake()))
print(can_quack(Platypus()))
| if isinstance(animal, Duck):
return True
elif isinstance(animal, Snake):
return False
else:
raise RuntimeError('Unknown animal!') |
get-video-id.d.ts | export = index;
declare function index(str: any): any; |
||
6_Compression.py | # Create a function that implements a basic compression algorithm by counting the chars
# that are present in a string; if the resulting string is longer than the input,
# then return original input. | # aaaaaaaaaaba: a10b1a1
### Note: Don't use extra space
import unittest
from collections import Counter
def compress2(s1):
newStr = []
count = 0
for i in range(len(s1)):
# Explanation
# the i != 0 is used to deal with the first character.
        # we could have initialized char beforehand, but that requires extra code:
        # char = s1[0]  # requires checking that s1 is not empty
        # - or -
        # char = ''     # requires checking whether char != ''
if i != 0 and s1[i] != s1[i-1]:
newStr.append(s1[i-1] + str(count))
count = 0
count += 1
newStr.append(s1[-1] + str(count)) # we do this to deal with the last characters
return min(s1, ''.join(newStr), key=len)
def compress(s1):
newStr = ''
char = ''
count = 0
for i in range(len(s1)):
if char != s1[i]:
if char != '': # we do this to deal with the initial case
newStr += char + str(count)
char = s1[i]
count = 1
else:
count += 1
newStr += char + str(count) # we do this to deal with the last characters
if len(newStr) > len(s1):
return s1
return newStr
class Test(unittest.TestCase):
valid = (
('aaabcccccaaa', 'a3b1c5a3'),
('abcdef', 'abcdef'),
('aaaaaaaaaaba', 'a10b1a1')
)
def test(self):
for [input, expected] in self.valid:
print(input,' vs ',expected)
result = compress(input)
self.assertEqual(result, expected)
if __name__ == "__main__":
unittest.main() | #
# Examples:
# aaabcccccaaa: a3b1c5a3
# abcdef: abcdef |
Transactor.ts | import { BigNumber, BigNumberish } from '@ethersproject/bignumber'
import { hexlify } from '@ethersproject/bytes'
import { Contract } from '@ethersproject/contracts'
import { Deferrable } from '@ethersproject/properties'
import { JsonRpcSigner, TransactionRequest } from '@ethersproject/providers'
import { parseUnits } from '@ethersproject/units'
import { notification } from 'antd'
import Notify, { InitOptions, TransactionEvent } from 'bnc-notify'
import { NetworkContext } from 'contexts/networkContext'
import { ThemeContext } from 'contexts/themeContext'
import { useCallback, useContext } from 'react'
export type TransactorCallback = (
e?: TransactionEvent,
signer?: JsonRpcSigner,
) => void
export type TransactorOptions = {
value?: BigNumberish
onDone?: VoidFunction
onConfirmed?: TransactorCallback
onCancelled?: TransactorCallback
}
export type Transactor = (
contract: Contract,
functionName: string,
args: any[],
options?: TransactorOptions,
) => Promise<boolean>
export type TransactorInstance<T> = (
args: T,
txOpts?: Omit<TransactorOptions, 'value'>,
) => ReturnType<Transactor>
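// Usage sketch (illustrative only; the contract, hook arguments and handler names
// are hypothetical):
//
//   const transactor = useTransactor({ gasPrice })
//   transactor?.(contracts.TerminalV1, 'pay', [projectId, beneficiary], {
//     value: weiAmount,
//     onConfirmed: () => refreshBalance(),
//   })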
// Check user has their wallet connected. If not, show select wallet prompt
const checkWalletConnected = (
onSelectWallet: VoidFunction,
userAddress?: string,
) => {
if (!userAddress && onSelectWallet) {
onSelectWallet()
}
}
// wrapper around BlockNative's Notify.js
// https://docs.blocknative.com/notify
export function useTransactor({
gasPrice,
}: {
gasPrice?: BigNumber
}): Transactor | undefined {
const {
signingProvider: provider,
onSelectWallet,
userAddress,
} = useContext(NetworkContext)
const { isDarkMode } = useContext(ThemeContext)
return useCallback(
async (
contract: Contract,
functionName: string,
args: any[],
options?: TransactorOptions,
) => {
if (!onSelectWallet) return false
if (!provider) {
onSelectWallet()
if (options?.onDone) options.onDone()
return false
}
checkWalletConnected(onSelectWallet, userAddress)
if (!provider) return false
const signer = provider.getSigner()
const network = await provider.getNetwork()
const notifyOpts: InitOptions = {
dappId: process.env.REACT_APP_BLOCKNATIVE_API_KEY,
system: 'ethereum',
networkId: network.chainId,
darkMode: isDarkMode,
transactionHandler: txInformation => {
console.info('HANDLE TX', txInformation)
if (options && txInformation.transaction.status === 'confirmed') {
options.onConfirmed && options.onConfirmed(txInformation, signer)
options.onDone && options.onDone()
}
if (options && txInformation.transaction.status === 'cancelled') {
options.onCancelled && options.onCancelled(txInformation, signer)
}
},
}
const notify = Notify(notifyOpts)
let etherscanNetwork = ''
if (network.name && network.chainId > 1) {
etherscanNetwork = network.name + '.'
}
let etherscanTxUrl = 'https://' + etherscanNetwork + 'etherscan.io/tx/'
if (network.chainId === 100) {
etherscanTxUrl = 'https://blockscout.com/poa/xdai/tx/'
}
const tx: Deferrable<TransactionRequest> =
options?.value !== undefined
? contract[functionName](...args, { value: options.value })
: contract[functionName](...args)
const reportArgs = Object.values(contract.interface.functions)
.find(f => f.name === functionName)
?.inputs.reduce(
(acc, input, i) => ({
...acc,
[input.name]: args[i],
}),
{},
)
console.info(
'🧃 Calling ' + functionName + '() with args:',
reportArgs,
tx,
)
try {
let result
if (tx instanceof Promise) {
console.info('AWAITING TX', tx)
result = await tx
} else {
console.info('RUNNING TX', tx)
if (!tx.gasPrice) tx.gasPrice = gasPrice ?? parseUnits('4.1', 'gwei')
if (!tx.gasLimit) tx.gasLimit = hexlify(120000)
result = await signer.sendTransaction(tx)
await result.wait()
}
console.info('RESULT:', result)
// if it is a valid Notify.js network, use that, if not, just send a default notification
const isNotifyNetwork =
[1, 3, 4, 5, 42, 100].indexOf(network.chainId) >= 0
if (isNotifyNetwork) {
const { emitter } = notify.hash(result.hash)
emitter.on('all', transaction => ({
onclick: () => window.open(etherscanTxUrl + transaction.hash),
}))
} else {
console.info('LOCAL TX SENT', result.hash)
if (result.confirmations) {
options?.onConfirmed && options.onConfirmed(result, signer)
} else {
options?.onCancelled && options.onCancelled(result, signer)
}
}
options?.onDone && options.onDone()
return true
} catch (e) {
const message = (e as Error).message
console.error('Transaction Error:', message)
let description: string
try {
let json = message.split('(error=')[1]
json = json.split(', method=')[0]
description = JSON.parse(json).message || message
} catch (_) {
description = message
}
notification.error({
key: new Date().valueOf().toString(),
message: 'Transaction failed',
description,
duration: 0,
})
options?.onDone && options.onDone()
return false
}
},
[onSelectWallet, provider, isDarkMode, gasPrice, userAddress],
) | } |
|
oghb.go | package main
import (
"bytes"
"encoding/json"
"flag"
"fmt"
"io/ioutil"
"net"
"net/http"
"net/url"
"time"
)
var TIMEOUT = 30
var apiKey *string
var name *string
var apiUrl *string
var action *string
var description *string
var interval *int
var intervalUnit *string
var delete *bool
func main() {
parseFlags()
if *action == "start" {
startHeartbeat()
} else if *action == "stop" {
stopHeartbeat()
} else if *action == "send" {
sendHeartbeat()
} else {
panic("Unknown action flag; use start or stop")
}
}
func parseFlags() {
action = flag.String("action", "", "start, stop or send")
apiKey = flag.String("apiKey", "", "API key")
name = flag.String("name", "", "heartbeat name")
apiUrl = flag.String("apiUrl", "https://api.opsgenie.com", "OpsGenie API url")
description = flag.String("description", "", "heartbeat description")
interval = flag.Int("timetoexpire", 10, "amount of time OpsGenie waits for a send request before creating alert")
intervalUnit = flag.String("intervalUnit", "minutes", "minutes, hours or days")
delete = flag.Bool("delete", false, "delete the heartbeat on stop")
flag.Parse()
if *action == "" {
panic("-action flag must be provided")
}
if *apiKey == "" {
panic("-apiKey flag must be provided")
}
if *name == "" {
panic("-name flag must be provided")
}
}
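// Example invocations (illustrative only; <key> and the heartbeat name are placeholders):
//
//	oghb -action start -apiKey <key> -name my-service -timetoexpire 10 -intervalUnit minutes
//	oghb -action send -apiKey <key> -name my-service
//	oghb -action stop -apiKey <key> -name my-service -delete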
func startHeartbeat() {
heartbeat := getHeartbeat()
if heartbeat == nil {
addHeartbeat()
} else {
updateHeartbeatWithEnabledTrue(*heartbeat)
}
sendHeartbeat()
}
func getHeartbeat() *heartbeat {
var requestParams = make(map[string]string)
requestParams["apiKey"] = *apiKey
requestParams["name"] = *name
statusCode, responseBody := doHttpRequest("GET", "/v1/json/heartbeat/", requestParams, nil)
if statusCode == 200 {
heartbeat := &heartbeat{}
err := json.Unmarshal(responseBody, &heartbeat)
if err != nil {
panic(err)
}
fmt.Println("Successfully retrieved heartbeat [" + *name + "]")
return heartbeat
} else {
errorResponse := createErrorResponse(responseBody)
if statusCode == 400 && errorResponse.Code == 17 {
fmt.Println("Heartbeat [" + *name + "] doesn't exist")
return nil
}
panic("Failed to get heartbeat [" + *name + "]; response from OpsGenie:" + errorResponse.Message)
}
}
func addHeartbeat() {
var contentParams = make(map[string]interface{})
contentParams["apiKey"] = apiKey
contentParams["name"] = name
if *description != "" {
contentParams["description"] = description
}
if *interval != 0 {
contentParams["interval"] = interval
}
if *intervalUnit != "" {
contentParams["intervalUnit"] = intervalUnit
}
statusCode, responseBody := doHttpRequest("POST", "/v1/json/heartbeat/", nil, contentParams)
if statusCode != 200 {
errorResponse := createErrorResponse(responseBody)
panic("Failed to add heartbeat [" + *name + "]; response from OpsGenie:" + errorResponse.Message)
}
fmt.Println("Successfully added heartbeat [" + *name + "]")
}
func updateHeartbeatWithEnabledTrue(heartbeat heartbeat) {
var contentParams = make(map[string]interface{})
contentParams["apiKey"] = apiKey
contentParams["name"] = heartbeat.Name
contentParams["enabled"] = true
if *description != "" {
contentParams["description"] = description
}
if *interval != 0 {
contentParams["interval"] = interval
}
if *intervalUnit != "" {
contentParams["intervalUnit"] = intervalUnit
}
statusCode, responseBody := doHttpRequest("POST", "/v1/json/heartbeat", nil, contentParams)
if statusCode != 200 {
errorResponse := createErrorResponse(responseBody)
panic("Failed to update heartbeat [" + *name + "]; response from OpsGenie:" + errorResponse.Message)
}
fmt.Println("Successfully enabled and updated heartbeat [" + *name + "]")
}
func sendHeartbeat() {
var contentParams = make(map[string]interface{})
contentParams["apiKey"] = apiKey
contentParams["name"] = name
statusCode, responseBody := doHttpRequest("POST", "/v1/json/heartbeat/send", nil, contentParams)
if statusCode != 200 {
errorResponse := createErrorResponse(responseBody)
panic("Failed to send heartbeat [" + *name + "]; response from OpsGenie:" + errorResponse.Message)
}
fmt.Println("Successfully sent heartbeat [" + *name + "]")
}
func stopHeartbeat() {
if *delete == true {
deleteHeartbeat()
} else {
disableHeartbeat()
}
}
func deleteHeartbeat() {
var requestParams = make(map[string]string)
requestParams["apiKey"] = *apiKey
requestParams["name"] = *name
statusCode, responseBody := doHttpRequest("DELETE", "/v1/json/heartbeat", requestParams, nil)
if statusCode != 200 {
errorResponse := createErrorResponse(responseBody)
panic("Failed to delete heartbeat [" + *name + "]; response from OpsGenie:" + errorResponse.Message)
}
fmt.Println("Successfully deleted heartbeat [" + *name + "]")
}
func disableHeartbeat() {
var contentParams = make(map[string]interface{})
contentParams["apiKey"] = apiKey
contentParams["name"] = name
statusCode, responseBody := doHttpRequest("POST", "/v1/json/heartbeat/disable", nil, contentParams)
if statusCode != 200 {
errorResponse := createErrorResponse(responseBody)
panic("Failed to disable heartbeat [" + *name + "]; response from OpsGenie:" + errorResponse.Message)
}
fmt.Println("Successfully disabled heartbeat [" + *name + "]")
}
func createErrorResponse(responseBody []byte) ErrorResponse {
errResponse := &ErrorResponse{}
err := json.Unmarshal(responseBody, &errResponse)
if err != nil {
panic(err)
}
return *errResponse
}
func doHttpRequest(method string, urlSuffix string, requestParameters map[string]string, contentParameters map[string]interface{}) (int, []byte) {
var buf, _ = json.Marshal(contentParameters)
body := bytes.NewBuffer(buf)
var Url *url.URL
Url, err := url.Parse(*apiUrl + urlSuffix)
if err != nil {
panic(err)
}
parameters := url.Values{}
for k, v := range requestParameters {
parameters.Add(k, v)
}
Url.RawQuery = parameters.Encode()
var request *http.Request
var _ error
if contentParameters == nil {
request, _ = http.NewRequest(method, Url.String(), nil)
} else {
request, _ = http.NewRequest(method, Url.String(), body)
}
client := getHttpClient(TIMEOUT)
resp, error := client.Do(request)
if resp != nil {
defer resp.Body.Close()
}
if error == nil {
defer resp.Body.Close()
body, err := ioutil.ReadAll(resp.Body)
if err == nil {
return resp.StatusCode, body
}
fmt.Println("Couldn't read the response from opsgenie")
panic(err)
} else {
fmt.Println("Couldn't send the request to opsgenie")
panic(error)
}
return 0, nil
}
func getHttpClient(seconds int) *http.Client {
client := &http.Client{
Transport: &http.Transport{
Proxy: http.ProxyFromEnvironment,
Dial: func(netw, addr string) (net.Conn, error) {
conn, err := net.DialTimeout(netw, addr, time.Second*time.Duration(seconds))
if err != nil {
return nil, err
}
conn.SetDeadline(time.Now().Add(time.Second * time.Duration(seconds)))
return conn, nil
},
}, |
type heartbeat struct {
Name string `json:"name"`
}
type ErrorResponse struct {
Code int `json:"code"`
Message string `json:"error"`
} | }
return client
} |
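// Illustrative sketch, not part of the original file: a hypothetical "get" helper
// composed from doHttpRequest and createErrorResponse in the same style as the
// commands above. The GET endpoint path is an assumption; only the POST/DELETE
// heartbeat endpoints appear in this file.
func getHeartbeatSketch() []byte {
	requestParams := make(map[string]string)
	requestParams["apiKey"] = *apiKey
	requestParams["name"] = *name
	statusCode, responseBody := doHttpRequest("GET", "/v1/json/heartbeat", requestParams, nil)
	if statusCode != 200 {
		errorResponse := createErrorResponse(responseBody)
		panic("Failed to get heartbeat [" + *name + "]; response from OpsGenie:" + errorResponse.Message)
	}
	return responseBody
}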
flowtesting.go | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0
package testdata
import (
"context"
"io"
"io/ioutil"
"net/http"
"go.amzn.com/lambda/appctx"
"go.amzn.com/lambda/core"
"go.amzn.com/lambda/core/statejson"
"go.amzn.com/lambda/interop"
"go.amzn.com/lambda/rapi/rendering"
"go.amzn.com/lambda/telemetry"
"go.amzn.com/lambda/testdata/mockthread"
)
type MockInteropServer struct {
Response []byte
ErrorResponse *interop.ErrorResponse
ActiveInvokeID string
}
// StartAcceptingDirectInvokes
func (i *MockInteropServer) StartAcceptingDirectInvokes() error { return nil }
// SendResponse writes response to a shared memory.
func (i *MockInteropServer) SendResponse(invokeID string, reader io.Reader) error {
bytes, err := ioutil.ReadAll(reader)
if err != nil {
return err
}
if len(bytes) > interop.MaxPayloadSize {
return &interop.ErrorResponseTooLarge{
ResponseSize: len(bytes),
MaxResponseSize: interop.MaxPayloadSize,
}
}
i.Response = bytes
return nil
}
// SendErrorResponse writes error response to a shared memory and sends GIRD FAULT.
func (i *MockInteropServer) SendErrorResponse(invokeID string, response *interop.ErrorResponse) error {
i.ErrorResponse = response
return nil
}
func (i *MockInteropServer) GetCurrentInvokeID() string {
return i.ActiveInvokeID
}
func (i *MockInteropServer) CommitResponse() error { return nil }
// SendRunning sends GIRD RUNNING.
func (i *MockInteropServer) SendRunning(*interop.Running) error { return nil }
// SendDone sends GIRD DONE.
func (i *MockInteropServer) SendDone(*interop.Done) error { return nil }
// SendDoneFail sends GIRD DONEFAIL.
func (i *MockInteropServer) SendDoneFail(*interop.DoneFail) error { return nil }
// StartChan returns Start emitter
func (i *MockInteropServer) StartChan() <-chan *interop.Start { return nil }
// InvokeChan returns Invoke emitter
func (i *MockInteropServer) InvokeChan() <-chan *interop.Invoke { return nil }
// ResetChan returns Reset emitter
func (i *MockInteropServer) ResetChan() <-chan *interop.Reset { return nil }
// ShutdownChan returns Shutdown emitter
func (i *MockInteropServer) ShutdownChan() <-chan *interop.Shutdown { return nil }
// TransportErrorChan emits errors if there was parsing/connection issue
func (i *MockInteropServer) TransportErrorChan() <-chan error { return nil }
func (i *MockInteropServer) Clear() {}
func (i *MockInteropServer) IsResponseSent() bool {
return !(i.Response == nil && i.ErrorResponse == nil)
}
func (i *MockInteropServer) SendRuntimeReady() error { return nil }
func (i *MockInteropServer) SetInternalStateGetter(isd interop.InternalStateGetter) {}
func (m *MockInteropServer) Init(i *interop.Start, invokeTimeoutMs int64) {}
func (m *MockInteropServer) Invoke(w http.ResponseWriter, i *interop.Invoke) error { return nil }
func (m *MockInteropServer) Shutdown(shutdown *interop.Shutdown) *statejson.InternalStateDescription { return nil }
// FlowTest provides configuration for tests that involve synchronization flows.
type FlowTest struct {
AppCtx appctx.ApplicationContext
InitFlow core.InitFlowSynchronization
InvokeFlow core.InvokeFlowSynchronization
RegistrationService core.RegistrationService
RenderingService *rendering.EventRenderingService
Runtime *core.Runtime
InteropServer *MockInteropServer
TelemetryService *MockNoOpTelemetryService
}
// ConfigureForInit initialize synchronization gates and states for init.
func (s *FlowTest) ConfigureForInit() {
s.RegistrationService.PreregisterRuntime(s.Runtime)
}
// ConfigureForInvoke initialize synchronization gates and states for invoke.
func (s *FlowTest) ConfigureForInvoke(ctx context.Context, invoke *interop.Invoke) {
s.InteropServer.ActiveInvokeID = invoke.ID
s.InvokeFlow.InitializeBarriers()
s.RenderingService.SetRenderer(rendering.NewInvokeRenderer(ctx, invoke, telemetry.GetCustomerTracingHeader))
}
// MockNoOpTelemetryService is a no-op telemetry API used in tests where it does not matter
type MockNoOpTelemetryService struct{}
// Subscribe writes response to a shared memory
func (m *MockNoOpTelemetryService) Subscribe(agentName string, body io.Reader, headers map[string][]string) ([]byte, int, map[string][]string, error) {
return []byte(`{}`), http.StatusOK, map[string][]string{}, nil
}
func (s *MockNoOpTelemetryService) RecordCounterMetric(metricName string, count int) {
// NOOP
}
func (s *MockNoOpTelemetryService) FlushMetrics() interop.LogsAPIMetrics {
return interop.LogsAPIMetrics(map[string]int{})
}
func (m *MockNoOpTelemetryService) Clear() {
// NOOP
}
func (m *MockNoOpTelemetryService) TurnOff() {
// NOOP
}
// NewFlowTest returns new FlowTest configuration.
func NewFlowTest() *FlowTest | {
appCtx := appctx.NewApplicationContext()
initFlow := core.NewInitFlowSynchronization()
invokeFlow := core.NewInvokeFlowSynchronization()
registrationService := core.NewRegistrationService(initFlow, invokeFlow)
renderingService := rendering.NewRenderingService()
runtime := core.NewRuntime(initFlow, invokeFlow)
runtime.ManagedThread = &mockthread.MockManagedThread{}
interopServer := &MockInteropServer{}
appctx.StoreInteropServer(appCtx, interopServer)
return &FlowTest{
AppCtx: appCtx,
InitFlow: initFlow,
InvokeFlow: invokeFlow,
RegistrationService: registrationService,
RenderingService: renderingService,
TelemetryService: &MockNoOpTelemetryService{},
Runtime: runtime,
InteropServer: interopServer,
}
} |
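// Illustrative sketch, not part of the original file: how a test could wire the
// helpers above together. Only the Invoke ID field is set because it is the one
// field this file demonstrably reads; anything else would be an assumption.
func exampleFlowTestUsage() {
	flowTest := NewFlowTest()
	flowTest.ConfigureForInit()
	invoke := &interop.Invoke{ID: "example-invoke-id"}
	flowTest.ConfigureForInvoke(context.Background(), invoke)
	_ = flowTest.InteropServer.GetCurrentInvokeID() // "example-invoke-id"
}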
|
hc8_dmaaddr.rs | #[doc = "Reader of register HC8_DMAADDR"]
pub type R = crate::R<u32, super::HC8_DMAADDR>;
#[doc = "Writer for register HC8_DMAADDR"]
pub type W = crate::W<u32, super::HC8_DMAADDR>;
#[doc = "Register HC8_DMAADDR `reset()`'s with value 0"]
impl crate::ResetValue for super::HC8_DMAADDR {
type Type = u32;
#[inline(always)]
fn reset_value() -> Self::Type {
0
}
}
#[doc = "Reader of field `DMAADDR`"]
pub type DMAADDR_R = crate::R<u32, u32>;
#[doc = "Write proxy for field `DMAADDR`"]
pub struct | <'a> {
w: &'a mut W,
}
impl<'a> DMAADDR_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u32) -> &'a mut W {
self.w.bits = (self.w.bits & !0xffff_ffff) | ((value as u32) & 0xffff_ffff);
self.w
}
}
impl R {
#[doc = "Bits 0:31 - Buffer DMA Mode: \\[31:0\\]
DMA Address"]
#[inline(always)]
pub fn dmaaddr(&self) -> DMAADDR_R {
DMAADDR_R::new((self.bits & 0xffff_ffff) as u32)
}
}
impl W {
#[doc = "Bits 0:31 - Buffer DMA Mode: \\[31:0\\]
DMA Address"]
#[inline(always)]
pub fn dmaaddr(&mut self) -> DMAADDR_W {
DMAADDR_W { w: self }
}
}
| DMAADDR_W |
http.go | package main
import (
"bytes"
"fmt"
httpclient "github.com/ddliu/go-httpclient"
"io"
"io/ioutil"
"mime/multipart"
"net/http"
"os"
)
func init() {
httpclient.Defaults(httpclient.Map{
httpclient.OPT_UNSAFE_TLS: true,
httpclient.OPT_MAXREDIRS: 0,
})
}
func PostJson(url string, v interface{}) (string, error) {
resp, err := httpclient.PostJson(url, v)
if err != nil {
return "", err
}
respBody, err := resp.ToString()
	return respBody, err
}
func PostForm(url string, headers map[string]string, params map[string]string) (string, []*http.Cookie, error) {
resp, err := httpclient.WithHeaders(headers).Post(url, params)
if err != nil {
return "", nil, err
}
respBody, err := resp.ToString()
return respBody, nil, err
}
func PostFormParams(url string, params map[string]string) (string, error) {
resp, err := httpclient.Post(url, params)
if err != nil {
return "", err
}
respBody, err := resp.ToString()
	return respBody, err
}
func Get(url string, params map[string]interface{}) (string, error) {
resp, err := httpclient.Get(url, params)
if err != nil {
return "", err
}
respBody, err := resp.ToString()
	return respBody, err
}
func GetWithCookieAndHeader(url string, headers map[string]string, cookie []*http.Cookie) (string, []*http.Cookie, error) {
//for i := 0; i < len(cookieStr); i ++ {
// cookie := cookieStr[i]
// index := strings.Index(cookie, "=")
// key := cookie[0:index]
// value := cookie[index+1:len(cookie)]
//
// fmt.Println(key)
// fmt.Println(value)
//
//}
//
//return
resp, err := httpclient.WithHeaders(headers).WithCookie(cookie[0]).Get(url)
if err != nil {
return "", nil, err
}
fmt.Println("cookie: ", resp.Cookies())
respBody, err := resp.ToString()
	return respBody, resp.Cookies(), err
}
func GetUrl(url string) (string, error) {
resp, err := httpclient.Get(url)
if err != nil {
return "", err
}
respBody, err := resp.ToString()
	return respBody, err
}
func | (url string, headers map[string]string) (string, []*http.Cookie, error) {
resp, err := httpclient.WithHeaders(headers).Get(url)
if err != nil {
return "", nil, err
}
fmt.Println("cookie: ", resp.Cookies())
respBody, err := resp.ToString()
	return respBody, resp.Cookies(), err
}
//PostFile 上传文件
func PostFile(fieldname, filename, uri string) ([]byte, error) {
fields := []MultipartFormField{
{
IsFile: true,
Fieldname: fieldname,
Filename: filename,
},
}
return PostMultipartForm(fields, uri)
}
//MultipartFormField 保存文件或其他字段信息
type MultipartFormField struct {
IsFile bool
Fieldname string
Value []byte
Filename string
}
//PostMultipartForm 上传文件或其他多个字段
func PostMultipartForm(fields []MultipartFormField, uri string) (respBody []byte, err error) {
bodyBuf := &bytes.Buffer{}
bodyWriter := multipart.NewWriter(bodyBuf)
for _, field := range fields {
if field.IsFile {
fileWriter, e := bodyWriter.CreateFormFile(field.Fieldname, field.Filename)
if e != nil {
err = fmt.Errorf("error writing to buffer , err=%v", e)
return
}
fh, e := os.Open(field.Filename)
if e != nil {
err = fmt.Errorf("error opening file , err=%v", e)
return
}
defer fh.Close()
if _, err = io.Copy(fileWriter, fh); err != nil {
return
}
} else {
partWriter, e := bodyWriter.CreateFormField(field.Fieldname)
if e != nil {
err = e
return
}
valueReader := bytes.NewReader(field.Value)
if _, err = io.Copy(partWriter, valueReader); err != nil {
return
}
}
}
contentType := bodyWriter.FormDataContentType()
bodyWriter.Close()
resp, e := http.Post(uri, contentType, bodyBuf)
if e != nil {
err = e
return
}
defer resp.Body.Close()
if resp.StatusCode != http.StatusOK {
		return nil, fmt.Errorf("unexpected status code %d posting multipart form", resp.StatusCode)
}
respBody, err = ioutil.ReadAll(resp.Body)
return
}
| GetUrlWithHeaders |
ws_handler.rs | use actix_web::HttpMessage;
use actix_web::Responder;
use actix_web::{http, web, Error, HttpRequest, HttpResponse};
use actix_web_actors::ws;
use crate::application::app::AppState;
use crate::participants::consumer_folder::consumer::Consumer;
use crate::participants::director_folder::director::Director;
use crate::participants::producer_folder::producer::Producer;
use crate::participants::viewer_folder::viewer::Viewer;
use crate::handle_to_app;
pub async fn handle_ws(req: HttpRequest, stream: web::Payload) -> Result<HttpResponse, Error> {
let viewtype: String = req.match_info().get("viewtype").unwrap().parse().unwrap();
let game_id: String = req.match_info().get("game_id").unwrap().parse().unwrap();
let uuid: String = req.match_info().get("uuid").unwrap().parse().unwrap();
let addr = req.app_data::<web::Data<actix::Addr<AppState>>>().unwrap();
match viewtype.as_ref() {
"director" => {
if let Some((game_addr, name)) = addr
.send(handle_to_app::IsRegisteredDirector {
user_id: uuid.clone(),
game_id: game_id.clone(),
})
.await
.unwrap()
{
let director_ws = Director::new(name, game_id.to_string(), game_addr);
let resp = ws::start(director_ws, &req, stream);
println!("{:?}", resp);
return resp;
}
}
"consumer" => {
if let Some((game_addr, name)) = addr
.send(handle_to_app::IsRegisteredPlayer {
user_id: uuid.clone(),
game_id: game_id.clone(),
})
.await
.unwrap()
{
let consumer_ws = Consumer::new(name, game_id.to_string(), game_addr);
let resp = ws::start(consumer_ws, &req, stream);
println!("{:?}", resp);
return resp;
}
}
"producer" => {
if let Some((game_addr, name)) = addr
.send(handle_to_app::IsRegisteredPlayer {
user_id: uuid.clone(),
game_id: game_id.clone(),
})
.await
.unwrap()
{
let producer_ws = Producer::new(name, game_id.to_string(), game_addr);
let resp = ws::start(producer_ws, &req, stream);
println!("{:?}", resp);
return resp;
}
}
"viewer" => {
if let Some((game_addr, name)) = addr
.send(handle_to_app::IsRegisteredViewer {
user_id: uuid.clone(),
game_id: game_id.clone(),
})
.await
.unwrap()
{
let viewer_ws = Viewer::new(name, game_id.to_string(), game_addr);
let resp = ws::start(viewer_ws, &req, stream);
println!("{:?}", resp);
return resp;
}
}
_ => {
return Ok(HttpResponse::build(http::StatusCode::OK).body("Invalid Viewtype"));
}
}
Ok(HttpResponse::build(http::StatusCode::OK)
.body("Failed: possible reasons are no cookies set or no corresponding uuid found"))
}
pub async fn | (req: HttpRequest) -> impl Responder {
return format!(
"{}\n{}\n{}",
req.cookie("viewtype").unwrap().value(),
req.cookie("game_id").unwrap().value(),
req.cookie("uuid").unwrap().value()
);
}
| handle_prep |
wsgi.py | """
WSGI config for delicate_forest_30319 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
| os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'delicate_forest_30319.settings')
application = get_wsgi_application() | from django.core.wsgi import get_wsgi_application
|
models.py | """ Ensure that the models work as intended """
import json
from django.conf import settings
from django.contrib.auth.models import User
from django.test import TestCase
from django.test.utils import override_settings
from mock import patch
from pinpayments.models import (
ConfigError,
CustomerToken,
PinError,
PinTransaction
)
from requests import Response
ENV_MISSING_SECRET = {
'test': {
'key': 'key1',
'host': 'test-api.pin.net.au',
},
}
ENV_MISSING_HOST = {
'test': {
'key': 'key1',
'secret': 'secret1',
},
}
class FakeResponse(Response):
def __init__(self, status_code, content):
super(FakeResponse, self).__init__()
self.status_code = status_code
self._content = content
class CustomerTokenTests(TestCase):
# Need to override the setting so we can delete it, not sure why.
@override_settings(PIN_DEFAULT_ENVIRONMENT=None)
def test_default_environment(self):
"""
Unset PIN_DEFAULT_ENVIRONMENT to test that the environment defaults
to 'test'.
"""
del settings.PIN_DEFAULT_ENVIRONMENT
token = CustomerToken()
token.user = User.objects.create()
token.environment = None
token.save()
self.assertEqual(token.environment, 'test')
class CreateFromCardTokenTests(TestCase):
""" Test the creation of customer tokens from card tokens """
def setUp(self):
""" Common setup for methods """
super(CreateFromCardTokenTests, self).setUp()
self.user = User.objects.create()
self.response_data = json.dumps({
'response': {
'token': '1234',
'email': '[email protected]',
'created_at': '2012-06-22T06:27:33Z',
'card': {
'token': '54321',
'display_number': 'XXXX-XXXX-XXXX-0000',
'scheme': 'master',
'expiry_month': 6,
'expiry_year': 2017,
'name': 'Roland Robot',
'address_line1': '42 Sevenoaks St',
'address_line2': None,
'address_city': 'Lathlain',
'address_postcode': '6454',
'address_state': 'WA',
'address_country': 'Australia',
'primary': None,
}
}
})
self.response_error = json.dumps({
'error': 'invalid_resource',
'error_description':
'One or more parameters were missing or invalid.'
})
@patch('requests.post')
def test_default_environment(self, mock_request):
""" return a default environment """
mock_request.return_value = FakeResponse(200, self.response_data)
token = CustomerToken.create_from_card_token('1234', self.user)
self.assertEqual(token.environment, 'test')
@override_settings(PIN_ENVIRONMENTS={})
@patch('requests.post')
def test_valid_environment(self, mock_request):
""" Check errors are raised with no environments """
mock_request.return_value = FakeResponse(200, self.response_data)
with self.assertRaises(ConfigError):
CustomerToken.create_from_card_token(
'1234', self.user, environment='test'
)
@override_settings(PIN_ENVIRONMENTS=ENV_MISSING_SECRET)
@patch('requests.post')
def test_secret_set(self, mock_request):
""" Check errors are raised when the secret is not set """
mock_request.return_value = FakeResponse(200, self.response_data)
with self.assertRaises(ConfigError):
CustomerToken.create_from_card_token(
'1234', self.user, environment='test'
)
@override_settings(PIN_ENVIRONMENTS=ENV_MISSING_HOST)
@patch('requests.post')
def test_host_set(self, mock_request):
""" Check errors are raised when the host is not set """
mock_request.return_value = FakeResponse(200, self.response_data)
with self.assertRaises(ConfigError):
CustomerToken.create_from_card_token(
'1234', self.user, environment='test'
)
@patch('requests.post')
def test_response_not_json(self, mock_request):
""" Validate non-json response """
mock_request.return_value = FakeResponse(200, '')
with self.assertRaises(PinError):
CustomerToken.create_from_card_token(
'1234', self.user, environment='test'
)
@patch('requests.post')
def test_response_error(self, mock_request):
""" Validate generic error response """
mock_request.return_value = FakeResponse(200, self.response_error)
with self.assertRaises(PinError):
CustomerToken.create_from_card_token(
'1234', self.user, environment='test'
)
@patch('requests.post')
def test_response_success(self, mock_request):
""" Validate successful response """
mock_request.return_value = FakeResponse(200, self.response_data)
customer = CustomerToken.create_from_card_token(
'1234', self.user, environment='test'
)
self.assertIsInstance(customer, CustomerToken)
self.assertEqual(customer.user, self.user)
self.assertEqual(customer.token, '1234')
self.assertEqual(customer.environment, 'test')
self.assertEqual(customer.card_number, 'XXXX-XXXX-XXXX-0000')
self.assertEqual(customer.card_type, 'master')
class PinTransactionTests(TestCase):
""" Transaction construction/init related tests """
def setUp(self):
""" Common setup for methods """
super(PinTransactionTests, self).setUp()
self.transaction = PinTransaction()
self.transaction.card_token = '12345'
self.transaction.ip_address = '127.0.0.1'
self.transaction.amount = 500
self.transaction.currency = 'AUD'
self.transaction.email_address = '[email protected]'
self.transaction.environment = 'test'
# Need to override the setting so we can delete it, not sure why.
@override_settings(PIN_DEFAULT_ENVIRONMENT=None)
def test_save_defaults(self):
"""
Unset PIN_DEFAULT_ENVIRONMENT to test that the environment defaults
to 'test'.
"""
del settings.PIN_DEFAULT_ENVIRONMENT
self.transaction.environment = None
self.transaction.save()
self.assertEqual(self.transaction.environment, 'test')
self.assertTrue(self.transaction.date)
def test_save_notokens(self):
"""
Check that an error is thrown if neither card nor customer token
are provided to the transaction
"""
self.transaction.card_token = None
self.transaction.customer_token = None
self.assertRaises(PinError, self.transaction.save)
def test_valid_environment(self):
"""
Check that errors are thrown when a fake environment is requested
"""
self.transaction.environment = 'this should not exist'
self.assertRaises(PinError, self.transaction.save)
class ProcessTransactionsTests(TestCase):
""" Transaction processing related tests """
def setUp(self):
""" Common setup for methods """
super(ProcessTransactionsTests, self).setUp()
self.transaction = PinTransaction()
self.transaction.card_token = '12345'
self.transaction.ip_address = '127.0.0.1'
self.transaction.amount = 500
self.transaction.currency = 'AUD'
self.transaction.email_address = '[email protected]'
self.transaction.environment = 'test'
self.transaction.save()
self.response_data = json.dumps({
'response': {
'token': '12345',
'success': True,
'amount': 500,
'total_fees': 500,
'currency': 'AUD',
'description': 'test charge',
'email': '[email protected]',
'ip_address': '127.0.0.1',
'created_at': '2012-06-20T03:10:49Z',
'status_message': 'Success!',
'error_message': None,
'card': {
'token': 'card_nytGw7koRg23EEp9NTmz9w',
'display_number': 'XXXX-XXXX-XXXX-0000',
'scheme': 'master',
'expiry_month': 6,
'expiry_year': 2017,
'name': 'Roland Robot',
'address_line1': '42 Sevenoaks St',
'address_line2': None,
'address_city': 'Lathlain',
'address_postcode': '6454',
'address_state': 'WA',
'address_country': 'Australia',
'primary': None,
},
'transfer': None
}
})
self.response_error = json.dumps({
'error': 'invalid_resource',
'error_description':
'One or more parameters were missing or invalid.',
# Should there really be a charge token?
'charge_token': '1234',
'messages': [{
'code': 'description_invalid',
'message': 'Description can\'t be blank',
'param': 'description'
}]
})
self.response_error_no_messages = json.dumps({
'error': 'invalid_resource',
'error_description':
'One or more parameters were missing or invalid.',
# Should there really be a charge token?
'charge_token': '1234'
})
@patch('requests.post')
def test_only_process_once(self, mock_request):
""" Check that transactions are processed exactly once """
mock_request.return_value = FakeResponse(200, self.response_data)
# Shouldn't be marked as processed before process_transaction is called
# for the first time.
self.assertFalse(self.transaction.processed)
# Should be marked after the first call.
result = self.transaction.process_transaction()
self.assertTrue(self.transaction.processed)
# Shouldn't process anything the second time
result = self.transaction.process_transaction()
self.assertIsNone(result)
@override_settings(PIN_ENVIRONMENTS={})
@patch('requests.post')
def test_valid_environment(self, mock_request):
""" Check that an error is thrown with no environment """
mock_request.return_value = FakeResponse(200, self.response_data)
self.assertRaises(PinError, self.transaction.process_transaction)
@override_settings(PIN_ENVIRONMENTS=ENV_MISSING_SECRET)
@patch('requests.post')
def test_secret_set(self, mock_request):
""" Check that an error is thrown with no secret """
mock_request.return_value = FakeResponse(200, self.response_data)
self.assertRaises(ConfigError, self.transaction.process_transaction)
@override_settings(PIN_ENVIRONMENTS=ENV_MISSING_HOST)
@patch('requests.post')
def test_host_set(self, mock_request):
""" Check that an error is thrown with no host """
mock_request.return_value = FakeResponse(200, self.response_data)
self.assertRaises(ConfigError, self.transaction.process_transaction)
@patch('requests.post')
def test_response_not_json(self, mock_request):
""" Check that failure is returned for non-JSON responses """
mock_request.return_value = FakeResponse(200, '')
response = self.transaction.process_transaction()
self.assertEqual(response, 'Failure.')
@patch('requests.post')
def | (self, mock_request):
""" Check that a specific error is thrown for invalid parameters """
mock_request.return_value = FakeResponse(200, self.response_error)
response = self.transaction.process_transaction()
self.assertEqual(response, 'Failure: Description can\'t be blank')
@patch('requests.post')
def test_response_noparam(self, mock_request):
""" Check that a specific error is thrown for missing parameters """
mock_request.return_value = FakeResponse(
200, self.response_error_no_messages
)
response = self.transaction.process_transaction()
self.assertEqual(
response,
'Failure: One or more parameters were missing or invalid.'
)
@patch('requests.post')
def test_response_success(self, mock_request):
""" Check that the success response is correctly processed """
mock_request.return_value = FakeResponse(200, self.response_data)
response = self.transaction.process_transaction()
self.assertEqual(response, 'Success!')
self.assertTrue(self.transaction.succeeded)
self.assertEqual(self.transaction.transaction_token, '12345')
self.assertEqual(self.transaction.fees, 5.0)
self.assertEqual(self.transaction.pin_response, 'Success!')
self.assertEqual(self.transaction.card_address1, '42 Sevenoaks St')
self.assertIsNone(self.transaction.card_address2)
self.assertEqual(self.transaction.card_city, 'Lathlain')
self.assertEqual(self.transaction.card_state, 'WA')
self.assertEqual(self.transaction.card_postcode, '6454')
self.assertEqual(self.transaction.card_country, 'Australia')
self.assertEqual(self.transaction.card_number, 'XXXX-XXXX-XXXX-0000')
self.assertEqual(self.transaction.card_type, 'master')
| test_response_badparam |
notebook.module.ts | /*
===============LICENSE_START=======================================================
Acumos Apache-2.0
===================================================================================
Copyright (C) 2019 AT&T Intellectual Property & Tech Mahindra. All rights reserved.
===================================================================================
This Acumos software file is distributed by AT&T and Tech Mahindra
under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
This file is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
===============LICENSE_END=========================================================
*/
import { NgModule, CUSTOM_ELEMENTS_SCHEMA } from '@angular/core';
import { ThemeModule } from '../../@theme/theme.module';
import { NotebookCatalogComponent } from './catalog/catalog.component';
import { NotebookViewComponent } from './view/view.component';
import { RouterModule } from '@angular/router';
@NgModule({
imports: [ThemeModule, RouterModule],
declarations: [NotebookCatalogComponent, NotebookViewComponent],
schemas: [CUSTOM_ELEMENTS_SCHEMA],
})
export class | {}
| NotebookModule |
tendermint.go | package crypto
import (
"fmt"
abci "github.com/tendermint/tendermint/abci/types"
tmCrypto "github.com/tendermint/tendermint/crypto"
tmEd25519 "github.com/tendermint/tendermint/crypto/ed25519"
tmSecp256k1 "github.com/tendermint/tendermint/crypto/secp256k1"
)
func PublicKeyFromTendermintPubKey(pubKey tmCrypto.PubKey) (PublicKey, error) {
switch pk := pubKey.(type) {
case tmEd25519.PubKeyEd25519:
return PublicKeyFromBytes(pk[:], CurveTypeEd25519)
case tmSecp256k1.PubKeySecp256k1:
return PublicKeyFromBytes(pk[:], CurveTypeSecp256k1)
default:
return PublicKey{}, fmt.Errorf("unrecognised tendermint public key type: %v", pk)
}
}
func PublicKeyFromABCIPubKey(pubKey abci.PubKey) (PublicKey, error) {
switch pubKey.Type {
case CurveTypeEd25519.ABCIType():
return PublicKey{
CurveType: CurveTypeEd25519,
PublicKey: pubKey.Data,
}, nil
case CurveTypeSecp256k1.ABCIType():
return PublicKey{
			CurveType: CurveTypeSecp256k1,
PublicKey: pubKey.Data,
}, nil
}
return PublicKey{}, fmt.Errorf("did not recognise ABCI PubKey type: %s", pubKey.Type)
}
// PublicKey extensions
// Return the ABCI PubKey. See Tendermint protobuf.go for the go-crypto conversion this is based on
func (p PublicKey) ABCIPubKey() abci.PubKey {
return abci.PubKey{
Type: p.CurveType.ABCIType(),
Data: p.PublicKey,
}
}
| copy(pk[:], p.PublicKey)
return pk
case CurveTypeSecp256k1:
pk := tmSecp256k1.PubKeySecp256k1{}
copy(pk[:], p.PublicKey)
return pk
default:
return nil
}
}
// Signature extensions
func (sig Signature) TendermintSignature() []byte {
return sig.Signature
} | func (p PublicKey) TendermintPubKey() tmCrypto.PubKey {
switch p.CurveType {
case CurveTypeEd25519:
pk := tmEd25519.PubKeyEd25519{} |
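// Illustrative sketch, not part of the original file: round-tripping a PublicKey
// through its ABCI form with the conversions defined above. `pub` stands in for
// any PublicKey value; how it was obtained is out of scope here.
func roundTripABCIPubKey(pub PublicKey) (PublicKey, error) {
	abciKey := pub.ABCIPubKey()
	return PublicKeyFromABCIPubKey(abciKey)
}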
jest.config.js | 'src/**/*.{js,jsx}',
],
transform: {
'^.+\\.jsx?$': '<rootDir>/jest.transform.js',
},
}; | module.exports = {
coverageReporters: ['lcov'],
collectCoverage: true,
collectCoverageFrom: [ |
|
test_binary.py | import numpy as np
from openephys_fileIO.fileIO import *
from openephys_fileIO.Binary import *
def | ():
# Test writing of binary data
dataFolder = 'test/data'
# Read the data in original int16 format
data,headers = load_OpenEphysRecording4BinaryFile(dataFolder,
num_data_channel=1,num_aux_channel=1, num_adc_channel=1)
print(headers)
# Write to binary file
writeBinaryData(dataFolder+'/experiment1/recording1/',data)
writeStructFile(dataFolder+'/experiment1/recording1/structure.oebin',headers)
#load the data in float format (take care of the bit per volt)
data,headers = load_OpenEphysRecording4BinaryFile(dataFolder,
num_data_channel=1,num_aux_channel=1, num_adc_channel=1,dtype=float)
# Load binary file using the offical function
data2, rate2 = Load('test/data')
    assert np.allclose(data.T, data2['100']['0']['0'])
def test_numpy2binary():
# test write of numpy data
Fs = 30000
x = np.random.randn(3*Fs,4)
bitVolts = 0.195
dataFolder = 'test/data2'
channel_names = [f'CH{i}' for i in range(x.shape[1])]
writeBinaryData(dataFolder+'/experiment1/recording1/', x, bitVolts)
writeStructFile(dataFolder+'/experiment1/recording1/structure.oebin',samplerate=30000,
num_channels= x.shape[1], bit_volts=bitVolts,channel_names=channel_names)
# load the binary file
data, rate = Load(dataFolder)
    assert np.allclose(x, data['100']['0']['0'])
| test_write_binary_data |
snap_lock_test.go | package snap_test
import (
"os"
"path/filepath"
"testing"
"github.com/canonical/microk8s-cluster-agent/pkg/snap"
)
func | (t *testing.T) {
s := snap.NewSnap("testdata", "testdata", nil)
if err := os.MkdirAll("testdata/var/lock", 0755); err != nil {
t.Fatalf("Failed to create directory: %s", err)
}
defer os.RemoveAll("testdata/var")
for _, tc := range []struct {
name string
file string
hasLock func() bool
}{
{name: "kubelite", file: "lite.lock", hasLock: s.HasKubeliteLock},
{name: "dqlite", file: "ha-cluster", hasLock: s.HasDqliteLock},
{name: "cert-reissue", file: "no-cert-reissue", hasLock: s.HasNoCertsReissueLock},
} {
t.Run(tc.name, func(t *testing.T) {
lockFile := filepath.Join("testdata", "var", "lock", tc.file)
if err := os.Remove(lockFile); err != nil && !os.IsNotExist(err) {
t.Fatalf("Failed to remove %s: %s", lockFile, err)
}
if tc.hasLock() {
t.Fatal("Expected not to have lock but we do")
}
if _, err := os.Create(lockFile); err != nil {
t.Fatalf("Failed to create %s: %s", lockFile, err)
}
if !tc.hasLock() {
t.Fatal("Expected to have lock but we do not")
}
os.Remove(lockFile)
})
}
}
| TestLock |
Selection.d.ts | import { IObjectWithKey, ISelection, SelectionMode } from './interfaces';
export interface ISelectionOptions {
onSelectionChanged?: () => void;
getKey?: (item: IObjectWithKey, index?: number) => string | number;
canSelectItem?: (item: IObjectWithKey, index?: number) => boolean;
selectionMode?: SelectionMode;
}
export declare class Selection implements ISelection {
count: number;
readonly mode: SelectionMode;
private _getKey;
private _canSelectItem;
private _changeEventSuppressionCount;
private _items;
private _selectedItems;
private _selectedIndices;
private _isAllSelected;
private _exemptedIndices;
private _exemptedCount;
private _keyToIndexMap;
private _anchoredIndex;
private _onSelectionChanged;
private _hasChanged;
private _unselectableIndices;
private _unselectableCount;
private _isModal;
constructor(options?: ISelectionOptions);
canSelectItem(item: IObjectWithKey, index?: number): boolean;
getKey(item: IObjectWithKey, index?: number): string;
setChangeEvents(isEnabled: boolean, suppressChange?: boolean): void;
isModal(): boolean;
setModal(isModal: boolean): void;
/**
* Selection needs the items, call this method to set them. If the set
* of items is the same, this will re-evaluate selection and index maps.
* Otherwise, shouldClear should be set to true, so that selection is
* cleared.
*/
setItems(items: IObjectWithKey[], shouldClear?: boolean): void;
getItems(): IObjectWithKey[];
getSelection(): IObjectWithKey[];
getSelectedCount(): number;
getSelectedIndices(): number[];
isRangeSelected(fromIndex: number, count: number): boolean;
isAllSelected(): boolean;
isKeySelected(key: string): boolean;
isIndexSelected(index: number): boolean;
setAllSelected(isAllSelected: boolean): void;
setKeySelected(key: string, isSelected: boolean, shouldAnchor: boolean): void;
setIndexSelected(index: number, isSelected: boolean, shouldAnchor: boolean): void;
selectToKey(key: string, clearSelection?: boolean): void;
selectToIndex(index: number, clearSelection?: boolean): void;
toggleAllSelected(): void;
toggleKeySelected(key: string): void; | } | toggleIndexSelected(index: number): void;
toggleRangeSelected(fromIndex: number, count: number): void;
private _updateCount();
private _change(); |
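// Illustrative usage sketch, not part of the original declaration file (kept as a
// comment because .d.ts files cannot hold executable code). It assumes the imported
// SelectionMode enum exposes a `multiple` member and that items carry a string `key`:
//
//   const selection = new Selection({
//     selectionMode: SelectionMode.multiple,
//     getKey: (item) => String(item.key),
//     onSelectionChanged: () => console.log(selection.getSelectedCount()),
//   });
//   selection.setItems([{ key: 'a' }, { key: 'b' }], true);
//   selection.setIndexSelected(0, true, true);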
github.py | import requests
import json
from errors import BotException
import logging
logger = logging.getLogger(__name__)
class Github(object):
def __init__(self, repo_slug: str):
"""
Args:
repo_slug: The slug (user/repo_name) of the github repository
"""
# TODO: Add support for custom token
self.repo_slug = repo_slug
self.api_base = "https://api.github.com"
def | (self, num: int) -> dict:
"""Get the metadata of a github issue/PR
Args:
num: The issue/PR number
Returns:
dict[str, str]: Metadata about the issue/PR
Raises:
FileNotFoundError: The issue/PR was not found
"""
# Assume it's a PR. Query github's API
resp = requests.get(self.api_base + f"/repos/{self.repo_slug}/pulls/{num}")
if resp.status_code == 404 or not resp.content:
raise FileNotFoundError
# Load JSON
body = json.loads(resp.content)
if resp.status_code == 403:
# Check if this is a rate limit hit or an invalid token
if "message" in body:
logger.error(f"Rate-limit hit on {resp.url}. Consider using your own Github token.")
raise PermissionError("rate-limit hit")
logger.error(f"Forbidden on contacting {resp.url}. Check your access token.")
raise PermissionError("forbidden")
if resp.status_code != 200:
raise BotException(f"HTTP error ({resp.status_code})")
return body
| get_info_for_issue_pr |
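# Illustrative usage sketch, not part of the original module. The repository slug,
# PR number, and the "title" field are placeholders/assumptions; the error handling
# mirrors the exceptions documented in get_info_for_issue_pr above.
def _example_usage():
    gh = Github("octocat/hello-world")
    try:
        info = gh.get_info_for_issue_pr(42)
        logger.info("Fetched PR/issue: %s", info.get("title"))
    except FileNotFoundError:
        logger.warning("Issue/PR not found")
    except PermissionError as exc:
        logger.warning("GitHub API refused the request: %s", exc)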
settings-persistent-data.js | /*global ons */
import {ApiService} from "./services/api.service.js";
function disableResetMap(flag) {
const resetMapButton = document.getElementById("reset_map_button");
if (flag) {
resetMapButton.setAttribute("disabled", "true");
} else {
resetMapButton.removeAttribute("disabled");
}
}
/** @param currentStatus {import('../lib/miio/Status')} */
function initForm(currentStatus) {
var labMode = document.getElementById("lab_mode_enabled");
labMode.addEventListener("change", function() {
disableResetMap(!labMode.checked);
});
labMode.checked = (currentStatus.lab_status === 1);
disableResetMap(currentStatus.lab_status !== 1);
}
async function updateSettingsPersistentDataPage() {
var loadingBarSettingsPersistentData =
document.getElementById("loading-bar-settings-persistent-data");
loadingBarSettingsPersistentData.setAttribute("indeterminate", "indeterminate");
try {
let res = await ApiService.getCapabilities();
if (res["persistent_data"]) {
document.getElementById("persistent_data_form").classList.remove("hidden");
res = await ApiService.getCurrentStatus();
initForm(res);
} else {
loadingBarSettingsPersistentData.removeAttribute("indeterminate");
document.getElementById("persistent_data_not_supported").classList.remove("hidden");
}
} catch (err) {
ons.notification.toast(err.message,
{buttonLabel: "Dismiss", timeout: window.fn.toastErrorTimeout});
} finally {
loadingBarSettingsPersistentData.removeAttribute("indeterminate");
}
}
async function resetMap() {
var loadingBarSettingsPersistentData =
document.getElementById("loading-bar-settings-persistent-data");
loadingBarSettingsPersistentData.setAttribute("indeterminate", "indeterminate");
try {
await ApiService.resetMap();
ons.notification.toast("Map resetted!",
{buttonLabel: "Dismiss", timeout: window.fn.toastOKTimeout});
} catch (err) {
ons.notification.toast(err.message,
{buttonLabel: "Dismiss", timeout: window.fn.toastErrorTimeout});
} finally {
loadingBarSettingsPersistentData.removeAttribute("indeterminate");
}
}
async function | () {
var loadingBarSettingsPersistentData =
document.getElementById("loading-bar-settings-persistent-data");
var labMode = document.getElementById("lab_mode_enabled");
const labStatus = true === labMode.checked;
loadingBarSettingsPersistentData.setAttribute("indeterminate", "indeterminate");
try {
await ApiService.setLabStatus(labStatus);
ons.notification.toast("Saved settings!",
{buttonLabel: "Dismiss", timeout: window.fn.toastOKTimeout});
} catch (err) {
ons.notification.toast(err.message,
{buttonLabel: "Dismiss", timeout: window.fn.toastErrorTimeout});
} finally {
loadingBarSettingsPersistentData.removeAttribute("indeterminate");
}
}
window.updateSettingsPersistentDataPage = updateSettingsPersistentDataPage;
window.disableResetMap = disableResetMap;
window.resetMap = resetMap;
window.savePersistentData = savePersistentData;
| savePersistentData |
lib.rs | pub mod vote_instruction;
pub mod vote_processor;
pub mod vote_state;
use solana_sdk::pubkey::Pubkey;
const VOTE_PROGRAM_ID: [u8; 32] = [
132, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0,
];
pub fn check_id(program_id: &Pubkey) -> bool {
program_id.as_ref() == VOTE_PROGRAM_ID
}
pub fn | () -> Pubkey {
Pubkey::new(&VOTE_PROGRAM_ID)
}
| id |
sys_role.auto.go | // Code generated by dol build. DO NOT EDIT.
// source: auto.go
package types
import (
"encoding/json"
"github.com/2637309949/dolphin/packages/null"
)
// SysRole defined
type SysRole struct {
// ID defined 主键
ID null.Int `xorm:"bigint(20) pk notnull autoincr comment('主键') 'id'" json:"id" form:"id" xml:"id"`
// Name defined 名称
Name null.String `xorm:"varchar(200) notnull comment('名称') 'name'" json:"name" form:"name" xml:"name"`
// Code defined 编码
Code null.String `xorm:"varchar(36) notnull comment('编码') 'code'" json:"code" form:"code" xml:"code"`
// Status defined 状态 0:禁用 1:正常
Status null.Int `xorm:"int(11) notnull comment('状态 0:禁用 1:正常') 'status'" json:"status" form:"status" xml:"status"`
// AppIndex defined 角色app首页url
AppIndex null.String `xorm:"varchar(500) comment('角色app首页url') 'app_index'" json:"app_index" form:"app_index" xml:"app_index"`
// AdminIndex defined 角色进入后台首页组件
AdminIndex null.String `xorm:"varchar(500) comment('角色进入后台首页组件') 'admin_index'" json:"admin_index" form:"admin_index" xml:"admin_index"`
// Creater defined 创建人
Creater null.Int `xorm:"bigint(20) notnull comment('创建人') 'creater'" json:"creater" form:"creater" xml:"creater"`
// CreateTime defined 创建时间
CreateTime null.Time `xorm:"datetime notnull comment('创建时间') 'create_time'" json:"create_time" form:"create_time" xml:"create_time"`
// Updater defined 最后更新人
Updater null.Int `xorm:"bigint(20) notnull comment('最后更新人') 'updater'" json:"updater" form:"updater" xml:"updater"`
// UpdateTime defined 最后更新时间
UpdateTime null.Time `xorm:"datetime notnull comment('最后更新时间') 'update_time'" json:"update_time" form:"update_time" xml:"update_time"`
// IsDelete defined 删除标记
IsDelete null.Int `xorm:"int(11) notnull comment('删除标记') 'is_delete'" json:"is_delete" form:"is_delete" xml:"is_delete"`
// Remark defined 备注
Remark null.String `xorm:"varchar(200) comment('备注') 'remark'" json:"remark" form:"remark" xml:"remark"`
}
// TableName table name of defined SysRole
func (m *SysRole) TableName() string {
return "sys_role"
}
func (r *SysRole) Marshal() ([]byte, error) {
return json.Marshal(r)
}
func UnmarshalSysRole(data []byte) (SysRole, error) {
var r SysRole
err := json.Unmarshal(data, &r) | return r, err
} | |
pesquisa_dados.js | function | () {
let pesquisa = document.getElementById("search").value;
console.log(pesquisa);
document.cookie = `pesquisa = ${pesquisa}`;
window.location.href = "pesquisa.php";
};
function define_categoria(valor) {
console.log(valor);
document.cookie = `categoriaClicada = ${valor}`;
window.location.href = "pesquisa.php";
};
function define_anuncio(valor) {
console.log(valor);
document.cookie = `anuncioClicado = ${valor}`;
};
function define_proposta(valor) {
console.log(valor);
document.cookie = `propostaClicada = ${valor}`;
};
| define_pesquisa |
model_checkpoint.py | """
Model Checkpointing
===================
Automatically save model checkpoints during training.
"""
import os
import re
import numpy as np
from typing import Optional
import torch
from pytorch_lightning import _logger as log
from pytorch_lightning.callbacks.base import Callback
from pytorch_lightning.utilities import rank_zero_warn, rank_zero_only
class ModelCheckpoint(Callback):
r"""
Save the model after every epoch if it improves.
After training finishes, use :attr:`best_model_path` to retrieve the path to the
best checkpoint file and :attr:`best_model_score` to retrieve its score.
Args:
filepath: path to save the model file.
Can contain named formatting options to be auto-filled.
Example::
# custom path
# saves a file like: my/path/epoch_0.ckpt
>>> checkpoint_callback = ModelCheckpoint('my/path/')
# save any arbitrary metrics like `val_loss`, etc. in name
# saves a file like: my/path/epoch=2-val_loss=0.2_other_metric=0.3.ckpt
>>> checkpoint_callback = ModelCheckpoint(
... filepath='my/path/{epoch}-{val_loss:.2f}-{other_metric:.2f}'
... )
Can also be set to `None`, then it will be set to default location
during trainer construction.
monitor: quantity to monitor.
verbose: verbosity mode. Default: ``False``.
save_last: always saves the model at the end of the epoch. Default: ``False``.
save_top_k: if `save_top_k == k`,
the best k models according to
the quantity monitored will be saved.
if ``save_top_k == 0``, no models are saved.
if ``save_top_k == -1``, all models are saved.
Please note that the monitors are checked every `period` epochs.
if ``save_top_k >= 2`` and the callback is called multiple
times inside an epoch, the name of the saved file will be
appended with a version count starting with `v0`.
mode: one of {auto, min, max}.
If ``save_top_k != 0``, the decision
to overwrite the current save file is made
based on either the maximization or the
minimization of the monitored quantity. For `val_acc`,
this should be `max`, for `val_loss` this should
be `min`, etc. In `auto` mode, the direction is
automatically inferred from the name of the monitored quantity.
save_weights_only: if ``True``, then only the model's weights will be
saved (``model.save_weights(filepath)``), else the full model
is saved (``model.save(filepath)``).
period: Interval (number of epochs) between checkpoints.
Example::
>>> from pytorch_lightning import Trainer
>>> from pytorch_lightning.callbacks import ModelCheckpoint
# saves checkpoints to 'my/path/' whenever 'val_loss' has a new min
>>> checkpoint_callback = ModelCheckpoint(filepath='my/path/')
>>> trainer = Trainer(checkpoint_callback=checkpoint_callback)
# save epoch and val_loss in name
# saves a file like: my/path/sample-mnist_epoch=02_val_loss=0.32.ckpt
>>> checkpoint_callback = ModelCheckpoint(
... filepath='my/path/sample-mnist_{epoch:02d}-{val_loss:.2f}'
... )
# retrieve the best checkpoint after training
checkpoint_callback = ModelCheckpoint(filepath='my/path/')
trainer = Trainer(checkpoint_callback=checkpoint_callback)
model = ...
trainer.fit(model)
checkpoint_callback.best_model_path
"""
def __init__(self, filepath: Optional[str] = None, monitor: str = 'val_loss', verbose: bool = False,
save_last: bool = False, save_top_k: int = 1, save_weights_only: bool = False,
mode: str = 'auto', period: int = 1, prefix: str = ''):
super().__init__()
if save_top_k > 0 and filepath is not None and os.path.isdir(filepath) and len(os.listdir(filepath)) > 0: | "All files in this directory will be deleted when a checkpoint is saved!"
)
self._rank = 0
self.monitor = monitor
self.verbose = verbose
if filepath is None: # will be determined by trainer at runtime
self.dirpath, self.filename = None, None
else:
if os.path.isdir(filepath):
self.dirpath, self.filename = filepath, '{epoch}'
else:
filepath = os.path.realpath(filepath)
self.dirpath, self.filename = os.path.split(filepath)
os.makedirs(self.dirpath, exist_ok=True)
self.save_last = save_last
self.save_top_k = save_top_k
self.save_weights_only = save_weights_only
self.period = period
self.epoch_last_check = None
self.prefix = prefix
self.best_k_models = {}
# {filename: monitor}
self.kth_best_model_path = ''
self.best_model_score = 0
self.best_model_path = ''
self.save_function = None
torch_inf = torch.tensor(np.Inf)
mode_dict = {
'min': (torch_inf, 'min'),
'max': (-torch_inf, 'max'),
'auto': (-torch_inf, 'max') if 'acc' in self.monitor or self.monitor.startswith('fmeasure')
else (torch_inf, 'min'),
}
if mode not in mode_dict:
rank_zero_warn(f'ModelCheckpoint mode {mode} is unknown, '
f'fallback to auto mode.', RuntimeWarning)
mode = 'auto'
self.kth_value, self.mode = mode_dict[mode]
@property
def best(self):
rank_zero_warn("Attribute `best` has been renamed to `best_model_score` since v0.8.0"
" and will be removed in v0.10.0", DeprecationWarning)
return self.best_model_score
@property
def kth_best_model(self):
rank_zero_warn("Attribute `kth_best_model` has been renamed to `kth_best_model_path` since v0.8.0"
" and will be removed in v0.10.0", DeprecationWarning)
return self.kth_best_model_path
def _del_model(self, filepath):
if os.path.isfile(filepath):
os.remove(filepath)
def _save_model(self, filepath):
# make paths
os.makedirs(os.path.dirname(filepath), exist_ok=True)
# delegate the saving to the model
if self.save_function is not None:
self.save_function(filepath, self.save_weights_only)
else:
raise ValueError(".save_function() not set")
def check_monitor_top_k(self, current):
less_than_k_models = len(self.best_k_models) < self.save_top_k
if less_than_k_models:
return True
if not isinstance(current, torch.Tensor):
rank_zero_warn(
f'{current} is supposed to be a `torch.Tensor`. Saving checkpoint may not work correctly.'
f' HINT: check the value of {self.monitor} in your validation loop', RuntimeWarning
)
current = torch.tensor(current)
monitor_op = {
"min": torch.lt,
"max": torch.gt,
}[self.mode]
return monitor_op(current, self.best_k_models[self.kth_best_model_path])
def format_checkpoint_name(self, epoch, metrics, ver=None):
"""Generate a filename according to the defined template.
Example::
>>> tmpdir = os.path.dirname(__file__)
>>> ckpt = ModelCheckpoint(os.path.join(tmpdir, '{epoch}'))
>>> os.path.basename(ckpt.format_checkpoint_name(0, {}))
'epoch=0.ckpt'
>>> ckpt = ModelCheckpoint(os.path.join(tmpdir, '{epoch:03d}'))
>>> os.path.basename(ckpt.format_checkpoint_name(5, {}))
'epoch=005.ckpt'
>>> ckpt = ModelCheckpoint(os.path.join(tmpdir, '{epoch}-{val_loss:.2f}'))
>>> os.path.basename(ckpt.format_checkpoint_name(2, dict(val_loss=0.123456)))
'epoch=2-val_loss=0.12.ckpt'
>>> ckpt = ModelCheckpoint(os.path.join(tmpdir, '{missing:d}'))
>>> os.path.basename(ckpt.format_checkpoint_name(0, {}))
'missing=0.ckpt'
"""
# check if user passed in keys to the string
groups = re.findall(r'(\{.*?)[:\}]', self.filename)
if len(groups) == 0:
# default name
filename = f'{self.prefix}_ckpt_epoch_{epoch}'
else:
metrics['epoch'] = epoch
filename = self.filename
for tmp in groups:
name = tmp[1:]
filename = filename.replace(tmp, name + '={' + name)
if name not in metrics:
metrics[name] = 0
filename = filename.format(**metrics)
str_ver = f'_v{ver}' if ver is not None else ''
filepath = os.path.join(self.dirpath, self.prefix + filename + str_ver + '.ckpt')
return filepath
@rank_zero_only
def on_train_start(self, trainer, pl_module):
"""
Determine model checkpoint save directory at runtime. References attributes from the
Trainer's logger to determine where to save checkpoints.
"""
if self.dirpath is not None:
return # short circuit
self.filename = '{epoch}'
if trainer.logger is not None:
# weights_save_path overrides anything
save_dir = (getattr(trainer, 'weights_save_path', None)
or getattr(trainer.logger, 'save_dir', None)
or trainer.default_root_dir)
version = trainer.logger.version if isinstance(
trainer.logger.version, str) else f'version_{trainer.logger.version}'
ckpt_path = os.path.join(
save_dir,
trainer.logger.name,
version,
"checkpoints"
)
else:
ckpt_path = os.path.join(trainer.default_root_dir, "checkpoints")
self.dirpath = ckpt_path
assert trainer.global_rank == 0, 'tried to make a checkpoint from non global_rank=0'
os.makedirs(self.dirpath, exist_ok=True)
trainer.ckpt_path = ckpt_path
trainer.weights_save_path = ckpt_path
@rank_zero_only
def on_validation_end(self, trainer, pl_module):
# only run on main process
if trainer.global_rank != 0:
return
metrics = trainer.callback_metrics
epoch = trainer.current_epoch
if self.save_top_k == 0:
# no models are saved
return
if self.epoch_last_check is not None and (epoch - self.epoch_last_check) < self.period:
# skipping in this term
return
self.epoch_last_check = epoch
if self.save_last:
filepath = os.path.join(self.dirpath, self.prefix + 'last.ckpt')
self._save_model(filepath)
filepath = self.format_checkpoint_name(epoch, metrics)
version_cnt = 0
while os.path.isfile(filepath):
filepath = self.format_checkpoint_name(epoch, metrics, ver=version_cnt)
# this epoch called before
version_cnt += 1
if self.save_top_k != -1:
current = metrics.get(self.monitor)
if not isinstance(current, torch.Tensor):
rank_zero_warn(
f'The metric you returned {current} must be a `torch.Tensor` instance, checkpoint not saved'
f' HINT: what is the value of {self.monitor} in validation_epoch_end()?', RuntimeWarning
)
if current is not None:
current = torch.tensor(current)
if current is None:
rank_zero_warn(
f'Can save best model only with {self.monitor} available, skipping.', RuntimeWarning
)
elif self.check_monitor_top_k(current):
self._do_check_save(filepath, current, epoch)
elif self.verbose > 0:
log.info(f'\nEpoch {epoch:05d}: {self.monitor} was not in top {self.save_top_k}')
else:
if self.verbose > 0:
log.info(f'\nEpoch {epoch:05d}: saving model to {filepath}')
assert trainer.global_rank == 0, 'tried to make a checkpoint from non global_rank=0'
self._save_model(filepath)
def _do_check_save(self, filepath, current, epoch):
# remove kth
del_list = []
if len(self.best_k_models) == self.save_top_k and self.save_top_k > 0:
delpath = self.kth_best_model_path
self.best_k_models.pop(self.kth_best_model_path)
del_list.append(delpath)
self.best_k_models[filepath] = current
if len(self.best_k_models) == self.save_top_k:
# monitor dict has reached k elements
_op = max if self.mode == 'min' else min
self.kth_best_model_path = _op(self.best_k_models,
key=self.best_k_models.get)
self.kth_value = self.best_k_models[self.kth_best_model_path]
_op = min if self.mode == 'min' else max
self.best_model_path = _op(self.best_k_models, key=self.best_k_models.get)
self.best_model_score = self.best_k_models[self.best_model_path]
if self.verbose > 0:
log.info(
f'\nEpoch {epoch:05d}: {self.monitor} reached'
f' {current:0.5f} (best {self.best_model_score:0.5f}), saving model to'
f' {filepath} as top {self.save_top_k}')
self._save_model(filepath)
for cur_path in del_list:
if cur_path != filepath:
self._del_model(cur_path) | rank_zero_warn(
f"Checkpoint directory {filepath} exists and is not empty with save_top_k != 0." |
mod.rs | // Copyright (c) The Libra Core Contributors
// SPDX-License-Identifier: Apache-2.0
use crate::{
account_address::AccountAddress,
account_config::LBR_NAME,
account_state_blob::AccountStateBlob,
block_metadata::BlockMetadata,
chain_id::ChainId,
contract_event::ContractEvent,
ledger_info::LedgerInfo,
proof::{accumulator::InMemoryAccumulator, TransactionInfoWithProof, TransactionListProof},
transaction::authenticator::TransactionAuthenticator,
vm_status::{DiscardedVMStatus, KeptVMStatus, StatusCode, StatusType, VMStatus},
write_set::WriteSet,
};
use anyhow::{ensure, format_err, Error, Result};
use libra_crypto::{
ed25519::*,
hash::{CryptoHash, EventAccumulatorHasher},
multi_ed25519::{MultiEd25519PublicKey, MultiEd25519Signature},
traits::SigningKey,
HashValue,
};
use libra_crypto_derive::{CryptoHasher, LCSCryptoHash};
#[cfg(any(test, feature = "fuzzing"))]
use proptest_derive::Arbitrary;
use serde::{Deserialize, Serialize};
use std::{
collections::HashMap,
convert::TryFrom,
fmt,
fmt::{Display, Formatter},
};
pub mod authenticator;
mod change_set;
pub mod helpers;
pub mod metadata;
mod module;
mod script;
mod transaction_argument;
pub use change_set::ChangeSet;
pub use module::Module;
pub use script::{ArgumentABI, Script, ScriptABI, TypeArgumentABI, SCRIPT_HASH_LENGTH};
use std::ops::Deref;
pub use transaction_argument::{parse_transaction_argument, TransactionArgument};
pub type Version = u64; // Height - also used for MVCC in StateDB
// In StateDB, things readable by the genesis transaction are under this version.
pub const PRE_GENESIS_VERSION: Version = u64::max_value();
pub const MAX_TRANSACTION_SIZE_IN_BYTES: usize = 4096;
/// RawTransaction is the portion of a transaction that a client signs
#[derive(Clone, Debug, Hash, Eq, PartialEq, Serialize, Deserialize, CryptoHasher, LCSCryptoHash)]
pub struct RawTransaction {
/// Sender's address.
sender: AccountAddress,
// Sequence number of this transaction corresponding to sender's account.
sequence_number: u64,
// The transaction script to execute.
payload: TransactionPayload,
// Maximal total gas specified by wallet to spend for this transaction.
max_gas_amount: u64,
// Maximal price can be paid per gas.
gas_unit_price: u64,
gas_currency_code: String,
// Expiration timestamp for this transaction. timestamp is represented
// as u64 in seconds from Unix Epoch. If storage is queried and
// the time returned is greater than or equal to this time and this
// transaction has not been included, you can be certain that it will
// never be included.
// A transaction that doesn't expire is represented by a very large value like
// u64::max_value().
expiration_timestamp_secs: u64,
// chain ID of the Libra network this transaction is intended for
chain_id: ChainId,
}
impl RawTransaction {
/// Create a new `RawTransaction` with a payload.
///
/// It can be either to publish a module, to execute a script, or to issue a writeset
/// transaction.
pub fn new(
sender: AccountAddress,
sequence_number: u64,
payload: TransactionPayload,
max_gas_amount: u64,
gas_unit_price: u64,
gas_currency_code: String,
expiration_timestamp_secs: u64,
chain_id: ChainId,
) -> Self {
RawTransaction {
sender,
sequence_number,
payload,
max_gas_amount,
gas_unit_price,
gas_currency_code,
expiration_timestamp_secs,
chain_id,
}
}
/// Create a new `RawTransaction` with a script.
///
/// A script transaction contains only code to execute. No publishing is allowed in scripts.
pub fn new_script(
sender: AccountAddress,
sequence_number: u64,
script: Script,
max_gas_amount: u64,
gas_unit_price: u64,
gas_currency_code: String,
expiration_timestamp_secs: u64,
chain_id: ChainId,
) -> Self {
RawTransaction {
sender,
sequence_number,
payload: TransactionPayload::Script(script),
max_gas_amount,
gas_unit_price,
gas_currency_code,
expiration_timestamp_secs,
chain_id,
}
}
/// Create a new `RawTransaction` with a module to publish.
///
/// A module transaction is the only way to publish code. Only one module per transaction
/// can be published.
pub fn new_module(
sender: AccountAddress,
sequence_number: u64,
module: Module,
max_gas_amount: u64,
gas_unit_price: u64,
gas_currency_code: String,
expiration_timestamp_secs: u64,
chain_id: ChainId,
) -> Self {
RawTransaction {
sender,
sequence_number,
payload: TransactionPayload::Module(module),
max_gas_amount,
gas_unit_price,
gas_currency_code,
expiration_timestamp_secs,
chain_id,
}
}
pub fn new_write_set(
sender: AccountAddress,
sequence_number: u64,
write_set: WriteSet,
chain_id: ChainId,
) -> Self {
Self::new_change_set(
sender,
sequence_number,
ChangeSet::new(write_set, vec![]),
chain_id,
)
}
pub fn new_change_set(
sender: AccountAddress,
sequence_number: u64,
change_set: ChangeSet,
chain_id: ChainId,
) -> Self {
RawTransaction {
sender,
sequence_number,
payload: TransactionPayload::WriteSet(WriteSetPayload::Direct(change_set)),
// Since write-set transactions bypass the VM, these fields aren't relevant.
max_gas_amount: 0,
gas_unit_price: 0,
gas_currency_code: LBR_NAME.to_owned(),
// Write-set transactions are special and important and shouldn't expire.
expiration_timestamp_secs: u64::max_value(),
chain_id,
}
}
pub fn new_writeset_script(
sender: AccountAddress,
sequence_number: u64,
script: Script,
signer: AccountAddress,
chain_id: ChainId,
) -> Self {
RawTransaction {
sender,
sequence_number,
payload: TransactionPayload::WriteSet(WriteSetPayload::Script {
execute_as: signer,
script,
}),
// Since write-set transactions bypass the VM, these fields aren't relevant.
max_gas_amount: 0,
gas_unit_price: 0,
gas_currency_code: LBR_NAME.to_owned(),
// Write-set transactions are special and important and shouldn't expire.
expiration_timestamp_secs: u64::max_value(),
chain_id,
}
}
/// Signs the given `RawTransaction`. Note that this consumes the `RawTransaction` and turns it
/// into a `SignatureCheckedTransaction`.
///
/// For a transaction that has just been signed, its signature is expected to be valid.
pub fn sign(
self,
private_key: &Ed25519PrivateKey,
public_key: Ed25519PublicKey,
) -> Result<SignatureCheckedTransaction> {
let signature = private_key.sign(&self);
Ok(SignatureCheckedTransaction(SignedTransaction::new(
self, public_key, signature,
)))
}
#[cfg(any(test, feature = "fuzzing"))]
pub fn multi_sign_for_testing(
self,
private_key: &Ed25519PrivateKey,
public_key: Ed25519PublicKey,
) -> Result<SignatureCheckedTransaction> {
let signature = private_key.sign(&self);
Ok(SignatureCheckedTransaction(
SignedTransaction::new_multisig(self, public_key.into(), signature.into()),
))
}
pub fn into_payload(self) -> TransactionPayload {
self.payload
}
pub fn format_for_client(&self, get_transaction_name: impl Fn(&[u8]) -> String) -> String {
let empty_vec = vec![];
let (code, args) = match &self.payload {
TransactionPayload::WriteSet(_) => ("genesis".to_string(), &empty_vec[..]),
TransactionPayload::Script(script) => {
(get_transaction_name(script.code()), script.args())
}
TransactionPayload::Module(_) => ("module publishing".to_string(), &empty_vec[..]),
};
let mut f_args: String = "".to_string();
for arg in args {
f_args = format!("{}\n\t\t\t{:#?},", f_args, arg);
}
format!(
"RawTransaction {{ \n\
\tsender: {}, \n\
\tsequence_number: {}, \n\
\tpayload: {{, \n\
\t\ttransaction: {}, \n\
\t\targs: [ {} \n\
\t\t]\n\
\t}}, \n\
\tmax_gas_amount: {}, \n\
\tgas_unit_price: {}, \n\
\tgas_currency_code: {}, \n\
\texpiration_timestamp_secs: {:#?}, \n\
\tchain_id: {},
}}",
self.sender,
self.sequence_number,
code,
f_args,
self.max_gas_amount,
self.gas_unit_price,
self.gas_currency_code,
self.expiration_timestamp_secs,
self.chain_id,
)
}
/// Return the sender of this transaction.
pub fn sender(&self) -> AccountAddress {
self.sender
}
}
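// Illustrative sketch (assumed values, not defined in this file): constructing a script
// transaction with `RawTransaction::new_script` and signing it with `sign`. The names
// `sender`, `script`, `chain_id`, `private_key`, and `public_key` are placeholders assumed
// to be in scope at the call site.
//
//     let raw_txn = RawTransaction::new_script(
//         sender,                  // AccountAddress of the sending account
//         0,                       // sequence_number
//         script,                  // Script to execute
//         1_000_000,               // max_gas_amount
//         0,                       // gas_unit_price
//         LBR_NAME.to_owned(),     // gas_currency_code
//         u64::max_value(),        // expiration_timestamp_secs (never expires)
//         chain_id,                // ChainId of the target network
//     );
//     // `sign` consumes the RawTransaction and yields a SignatureCheckedTransaction.
//     let signed_txn: SignedTransaction = raw_txn.sign(&private_key, public_key)?.into_inner();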
#[derive(Clone, Debug, Hash, Eq, PartialEq, Serialize, Deserialize)]
pub enum TransactionPayload {
WriteSet(WriteSetPayload),
/// A transaction that executes code.
Script(Script),
/// A transaction that publishes code.
Module(Module),
}
#[derive(Clone, Debug, Hash, Eq, PartialEq, Serialize, Deserialize)]
pub enum WriteSetPayload {
/// Directly passing in the write set.
Direct(ChangeSet),
/// Generate the writeset by running a script.
Script {
/// Execute the script as the designated signer.
execute_as: AccountAddress,
/// Script body that gets executed.
script: Script,
},
}
/// A transaction that has been signed.
///
/// A `SignedTransaction` is a single transaction that can be atomically executed. Clients submit
/// these to validator nodes, and the validator and executor submit these to the VM.
///
/// **IMPORTANT:** The signature of a `SignedTransaction` is not guaranteed to be verified. For a
/// transaction whose signature is statically guaranteed to be verified, see
/// [`SignatureCheckedTransaction`].
#[derive(Clone, Eq, PartialEq, Hash, Serialize, Deserialize)]
pub struct SignedTransaction {
/// The raw transaction
raw_txn: RawTransaction,
/// Public key and signature to authenticate
authenticator: TransactionAuthenticator,
}
/// A transaction for which the signature has been verified. Created by
/// [`SignedTransaction::check_signature`] and [`RawTransaction::sign`].
#[derive(Clone, Debug, Eq, PartialEq, Hash)]
pub struct SignatureCheckedTransaction(SignedTransaction);
impl SignatureCheckedTransaction {
/// Returns the `SignedTransaction` within.
pub fn into_inner(self) -> SignedTransaction {
self.0
}
/// Returns the `RawTransaction` within.
pub fn into_raw_transaction(self) -> RawTransaction {
self.0.into_raw_transaction()
}
}
impl Deref for SignatureCheckedTransaction {
type Target = SignedTransaction;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl fmt::Debug for SignedTransaction {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(
f,
"SignedTransaction {{ \n \
{{ raw_txn: {:#?}, \n \
authenticator: {:#?}, \n \
}} \n \
}}",
self.raw_txn, self.authenticator
)
}
}
impl SignedTransaction {
pub fn new(
raw_txn: RawTransaction,
public_key: Ed25519PublicKey,
signature: Ed25519Signature,
) -> SignedTransaction {
let authenticator = TransactionAuthenticator::ed25519(public_key, signature);
SignedTransaction {
raw_txn,
authenticator,
}
}
pub fn new_multisig(
raw_txn: RawTransaction,
public_key: MultiEd25519PublicKey,
signature: MultiEd25519Signature,
) -> SignedTransaction {
let authenticator = TransactionAuthenticator::multi_ed25519(public_key, signature);
SignedTransaction {
raw_txn,
authenticator,
}
}
pub fn authenticator(&self) -> TransactionAuthenticator {
self.authenticator.clone()
}
pub fn sender(&self) -> AccountAddress {
self.raw_txn.sender
}
pub fn into_raw_transaction(self) -> RawTransaction {
self.raw_txn
}
pub fn sequence_number(&self) -> u64 {
self.raw_txn.sequence_number
}
pub fn chain_id(&self) -> ChainId {
self.raw_txn.chain_id
}
pub fn payload(&self) -> &TransactionPayload {
&self.raw_txn.payload
}
pub fn max_gas_amount(&self) -> u64 {
self.raw_txn.max_gas_amount
}
pub fn gas_unit_price(&self) -> u64 {
self.raw_txn.gas_unit_price
}
pub fn gas_currency_code(&self) -> &str {
&self.raw_txn.gas_currency_code
}
pub fn expiration_timestamp_secs(&self) -> u64 {
self.raw_txn.expiration_timestamp_secs
}
pub fn raw_txn_bytes_len(&self) -> usize {
lcs::to_bytes(&self.raw_txn)
.expect("Unable to serialize RawTransaction")
.len()
}
/// Checks the signature of the given transaction. Returns `Ok(SignatureCheckedTransaction)` if
/// the signature is valid.
pub fn check_signature(self) -> Result<SignatureCheckedTransaction> {
self.authenticator.verify(&self.raw_txn)?;
Ok(SignatureCheckedTransaction(self))
}
pub fn format_for_client(&self, get_transaction_name: impl Fn(&[u8]) -> String) -> String {
format!(
"SignedTransaction {{ \n \
raw_txn: {}, \n \
authenticator: {:#?}, \n \
}}",
self.raw_txn.format_for_client(get_transaction_name),
self.authenticator
)
}
}
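// Illustrative sketch (assumed value `signed_txn`, not defined in this file): a
// `SignedTransaction` received over the wire is not guaranteed to carry a valid signature,
// so callers verify it before trusting it.
//
//     let checked: SignatureCheckedTransaction = signed_txn.check_signature()?;
//     // Via the Deref impl above, the checked wrapper exposes the SignedTransaction API.
//     let sender = checked.sender();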
#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
#[cfg_attr(any(test, feature = "fuzzing"), derive(Arbitrary))]
pub struct TransactionWithProof {
pub version: Version,
pub transaction: Transaction,
pub events: Option<Vec<ContractEvent>>,
pub proof: TransactionInfoWithProof,
}
impl TransactionWithProof {
pub fn new(
version: Version,
transaction: Transaction,
events: Option<Vec<ContractEvent>>,
proof: TransactionInfoWithProof,
) -> Self {
Self {
version, | transaction,
events,
proof,
}
}
/// Verifies the transaction with the proof, both carried by `self`.
///
/// A few things are ensured if no error is raised:
/// 1. This transaction exists in the ledger represented by `ledger_info`.
/// 2. This transaction is a `UserTransaction`.
/// 3. This user transaction has the same `version`, `sender`, and `sequence_number` as
///    indicated by the parameter list. If any of these parameters is unknown to the call site
///    that is supposed to be informed via this struct, get it from the struct itself, such
///    as the version and sender.
pub fn verify_user_txn(
&self,
ledger_info: &LedgerInfo,
version: Version,
sender: AccountAddress,
sequence_number: u64,
) -> Result<()> {
let signed_transaction = self.transaction.as_signed_user_txn()?;
ensure!(
self.version == version,
"Version ({}) is not expected ({}).",
self.version,
version,
);
ensure!(
signed_transaction.sender() == sender,
"Sender ({}) not expected ({}).",
signed_transaction.sender(),
sender,
);
ensure!(
signed_transaction.sequence_number() == sequence_number,
"Sequence number ({}) not expected ({}).",
signed_transaction.sequence_number(),
sequence_number,
);
let txn_hash = self.transaction.hash();
ensure!(
txn_hash == self.proof.transaction_info().transaction_hash,
"Transaction hash ({}) not expected ({}).",
txn_hash,
self.proof.transaction_info().transaction_hash,
);
if let Some(events) = &self.events {
let event_hashes: Vec<_> = events.iter().map(ContractEvent::hash).collect();
let event_root_hash =
InMemoryAccumulator::<EventAccumulatorHasher>::from_leaves(&event_hashes[..])
.root_hash();
ensure!(
event_root_hash == self.proof.transaction_info().event_root_hash,
"Event root hash ({}) not expected ({}).",
event_root_hash,
self.proof.transaction_info().event_root_hash,
);
}
self.proof.verify(ledger_info, version)
}
}
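// Illustrative sketch (assumed values `txn_with_proof`, `ledger_info`, `sender`, `seq_num`):
// verifying a fetched user transaction against a trusted LedgerInfo, as described in the
// doc comment on `verify_user_txn` above.
//
//     txn_with_proof.verify_user_txn(&ledger_info, txn_with_proof.version, sender, seq_num)?;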
/// The status of executing a transaction. The VM decides whether or not we should `Keep` the
/// transaction output or `Discard` it based upon the execution of the transaction. We wrap these
/// decisions around a `VMStatus` that provides more detail on the final execution state of the VM.
#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub enum TransactionStatus {
/// Discard the transaction output
Discard(DiscardedVMStatus),
/// Keep the transaction output
Keep(KeptVMStatus),
/// Retry the transaction because it is after a ValidatorSetChange txn
Retry,
}
impl TransactionStatus {
pub fn status(&self) -> Result<KeptVMStatus, StatusCode> {
match self {
TransactionStatus::Keep(status) => Ok(status.clone()),
TransactionStatus::Discard(code) => Err(*code),
TransactionStatus::Retry => Err(StatusCode::UNKNOWN_VALIDATION_STATUS),
}
}
pub fn is_discarded(&self) -> bool {
match self {
TransactionStatus::Discard(_) => true,
TransactionStatus::Keep(_) => false,
TransactionStatus::Retry => true,
}
}
}
impl From<VMStatus> for TransactionStatus {
fn from(vm_status: VMStatus) -> Self {
match vm_status.keep_or_discard() {
Ok(recorded) => TransactionStatus::Keep(recorded),
Err(code) => TransactionStatus::Discard(code),
}
}
}
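// Illustrative sketch (assumed value `vm_status` of type VMStatus): mapping the VM's result
// into a TransactionStatus via the From impl above. Kept statuses retain the output,
// discarded ones drop it.
//
//     let txn_status = TransactionStatus::from(vm_status);
//     if txn_status.is_discarded() {
//         // the output is dropped; `status()` returns the discard code as Err
//     }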
/// The result of running the transaction through the VM validator.
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct VMValidatorResult {
status: Option<DiscardedVMStatus>,
score: u64,
is_governance_txn: bool,
}
impl VMValidatorResult {
pub fn new(vm_status: Option<DiscardedVMStatus>, score: u64, is_governance_txn: bool) -> Self {
debug_assert!(
match vm_status {
None => true,
Some(status) =>
status.status_type() == StatusType::Unknown
|| status.status_type() == StatusType::Validation
|| status.status_type() == StatusType::InvariantViolation,
},
"Unexpected discarded status: {:?}",
vm_status
);
Self {
status: vm_status,
score,
is_governance_txn,
}
}
pub fn status(&self) -> Option<DiscardedVMStatus> {
self.status
}
pub fn score(&self) -> u64 {
self.score
}
pub fn is_governance_txn(&self) -> bool {
self.is_governance_txn
}
}
/// The output of executing a transaction.
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct TransactionOutput {
/// The list of writes this transaction intends to do.
write_set: WriteSet,
/// The list of events emitted during this transaction.
events: Vec<ContractEvent>,
/// The amount of gas used during execution.
gas_used: u64,
/// The execution status.
status: TransactionStatus,
}
impl TransactionOutput {
pub fn new(
write_set: WriteSet,
events: Vec<ContractEvent>,
gas_used: u64,
status: TransactionStatus,
) -> Self {
TransactionOutput {
write_set,
events,
gas_used,
status,
}
}
pub fn write_set(&self) -> &WriteSet {
&self.write_set
}
pub fn events(&self) -> &[ContractEvent] {
&self.events
}
pub fn gas_used(&self) -> u64 {
self.gas_used
}
pub fn status(&self) -> &TransactionStatus {
&self.status
}
}
/// `TransactionInfo` is the object we store in the transaction accumulator. It consists of the
/// transaction as well as the execution result of this transaction.
#[derive(Clone, CryptoHasher, LCSCryptoHash, Debug, Eq, PartialEq, Serialize, Deserialize)]
#[cfg_attr(any(test, feature = "fuzzing"), derive(Arbitrary))]
pub struct TransactionInfo {
/// The hash of this transaction.
transaction_hash: HashValue,
/// The root hash of Sparse Merkle Tree describing the world state at the end of this
/// transaction.
state_root_hash: HashValue,
/// The root hash of Merkle Accumulator storing all events emitted during this transaction.
event_root_hash: HashValue,
/// The amount of gas used.
gas_used: u64,
/// The VM status. If it is not `Executed`, this provides the general error class. Execution
/// failures and Move aborts receive more detailed information, but other errors are generally
/// categorized with no status code or other information.
status: KeptVMStatus,
}
impl TransactionInfo {
/// Constructs a new `TransactionInfo` object using transaction hash, state root hash and event
/// root hash.
pub fn new(
transaction_hash: HashValue,
state_root_hash: HashValue,
event_root_hash: HashValue,
gas_used: u64,
status: KeptVMStatus,
) -> TransactionInfo {
TransactionInfo {
transaction_hash,
state_root_hash,
event_root_hash,
gas_used,
status,
}
}
/// Returns the hash of this transaction.
pub fn transaction_hash(&self) -> HashValue {
self.transaction_hash
}
/// Returns root hash of Sparse Merkle Tree describing the world state at the end of this
/// transaction.
pub fn state_root_hash(&self) -> HashValue {
self.state_root_hash
}
/// Returns the root hash of Merkle Accumulator storing all events emitted during this
/// transaction.
pub fn event_root_hash(&self) -> HashValue {
self.event_root_hash
}
/// Returns the amount of gas used by this transaction.
pub fn gas_used(&self) -> u64 {
self.gas_used
}
pub fn status(&self) -> &KeptVMStatus {
&self.status
}
}
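// Illustrative sketch (assumed hash and gas values): assembling the TransactionInfo that is
// stored in the transaction accumulator once execution has produced its roots.
//
//     let info = TransactionInfo::new(
//         txn_hash,          // HashValue of the executed transaction
//         state_root_hash,   // Sparse Merkle Tree root after the transaction
//         event_root_hash,   // accumulator root over the emitted events
//         gas_used,
//         KeptVMStatus::Executed,
//     );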
impl Display for TransactionInfo {
fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {
write!(
f,
"TransactionInfo: [txn_hash: {}, state_root_hash: {}, event_root_hash: {}, gas_used: {}, recorded_status: {:?}]",
self.transaction_hash(), self.state_root_hash(), self.event_root_hash(), self.gas_used(), self.status(),
)
}
}
#[derive(Clone, Debug, Eq, PartialEq, Deserialize, Serialize)]
pub struct TransactionToCommit {
transaction: Transaction,
account_states: HashMap<AccountAddress, AccountStateBlob>,
events: Vec<ContractEvent>,
gas_used: u64,
status: KeptVMStatus,
}
impl TransactionToCommit {
pub fn new(
transaction: Transaction,
account_states: HashMap<AccountAddress, AccountStateBlob>,
events: Vec<ContractEvent>,
gas_used: u64,
status: KeptVMStatus,
) -> Self {
TransactionToCommit {
transaction,
account_states,
events,
gas_used,
status,
}
}
pub fn transaction(&self) -> &Transaction {
&self.transaction
}
pub fn account_states(&self) -> &HashMap<AccountAddress, AccountStateBlob> {
&self.account_states
}
pub fn events(&self) -> &[ContractEvent] {
&self.events
}
pub fn gas_used(&self) -> u64 {
self.gas_used
}
pub fn status(&self) -> &KeptVMStatus {
&self.status
}
}
/// The list may have three states:
/// 1. The list is empty. Both proofs must be `None`.
/// 2. The list has only 1 transaction/transaction_info. Then `proof_of_first_transaction`
/// must exist and `proof_of_last_transaction` must be `None`.
/// 3. The list has 2+ transactions/transaction_infos. Then both proofs must exist.
#[derive(Clone, Debug, Eq, PartialEq, Deserialize, Serialize)]
pub struct TransactionListWithProof {
pub transactions: Vec<Transaction>,
pub events: Option<Vec<Vec<ContractEvent>>>,
pub first_transaction_version: Option<Version>,
pub proof: TransactionListProof,
}
impl TransactionListWithProof {
/// Constructor.
pub fn new(
transactions: Vec<Transaction>,
events: Option<Vec<Vec<ContractEvent>>>,
first_transaction_version: Option<Version>,
proof: TransactionListProof,
) -> Self {
Self {
transactions,
events,
first_transaction_version,
proof,
}
}
/// Creates an empty transaction list.
pub fn new_empty() -> Self {
Self::new(vec![], None, None, TransactionListProof::new_empty())
}
/// Verifies the transaction list with the proofs, both carried on `self`.
///
/// Two things are ensured if no error is raised:
/// 1. All the transactions exist on the ledger represented by `ledger_info`.
/// 2. The transactions in the list have consecutive versions starting from
/// `first_transaction_version`. When `first_transaction_version` is None, ensures the list is
/// empty.
pub fn verify(
&self,
ledger_info: &LedgerInfo,
first_transaction_version: Option<Version>,
) -> Result<()> {
ensure!(
self.first_transaction_version == first_transaction_version,
"First transaction version ({}) not expected ({}).",
Self::display_option_version(self.first_transaction_version),
Self::display_option_version(first_transaction_version),
);
let txn_hashes: Vec<_> = self.transactions.iter().map(CryptoHash::hash).collect();
self.proof
.verify(ledger_info, self.first_transaction_version, &txn_hashes)?;
// Verify the events if they exist.
if let Some(event_lists) = &self.events {
ensure!(
event_lists.len() == self.transactions.len(),
"The length of event_lists ({}) does not match the number of transactions ({}).",
event_lists.len(),
self.transactions.len(),
);
itertools::zip_eq(event_lists, self.proof.transaction_infos())
.map(|(events, txn_info)| {
let event_hashes: Vec<_> = events.iter().map(ContractEvent::hash).collect();
let event_root_hash =
InMemoryAccumulator::<EventAccumulatorHasher>::from_leaves(&event_hashes)
.root_hash();
ensure!(
event_root_hash == txn_info.event_root_hash(),
"Some event root hash calculated doesn't match that carried on the \
transaction info.",
);
Ok(())
})
.collect::<Result<Vec<_>>>()?;
}
Ok(())
}
pub fn is_empty(&self) -> bool {
self.transactions.is_empty()
}
pub fn len(&self) -> usize {
self.transactions.len()
}
fn display_option_version(version: Option<Version>) -> String {
match version {
Some(v) => format!("{}", v),
None => String::from("absent"),
}
}
}
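// Illustrative sketch (assumed values `list` and `ledger_info`): verifying a chunk of
// transactions received during sync, as described in the `verify` doc comment above.
// `first_version` is the version the requester asked the chunk to start at.
//
//     list.verify(&ledger_info, Some(first_version))?;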
/// `Transaction` will be the transaction type used internally in the libra node to represent the
/// transaction to be processed and persisted.
///
/// We suppress the clippy warning here as we would expect most of the transactions to be user
/// transactions.
#[allow(clippy::large_enum_variant)]
#[cfg_attr(any(test, feature = "fuzzing"), derive(Arbitrary))]
#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize, CryptoHasher, LCSCryptoHash)]
pub enum Transaction {
/// Transaction submitted by the user. e.g: P2P payment transaction, publishing module
/// transaction, etc.
/// TODO: We need to rename SignedTransaction to SignedUserTransaction, as well as all the other
/// transaction types we had in our codebase.
UserTransaction(SignedTransaction),
/// Transaction that applies a WriteSet to the current storage. This should be used ONLY for
/// genesis right now.
WaypointWriteSet(ChangeSet),
/// Transaction to update the block metadata resource at the beginning of a block.
BlockMetadata(BlockMetadata),
}
impl Transaction {
pub fn as_signed_user_txn(&self) -> Result<&SignedTransaction> {
match self {
Transaction::UserTransaction(txn) => Ok(txn),
_ => Err(format_err!("Not a user transaction.")),
}
}
pub fn format_for_client(&self, get_transaction_name: impl Fn(&[u8]) -> String) -> String {
match self {
Transaction::UserTransaction(user_txn) => {
user_txn.format_for_client(get_transaction_name)
}
// TODO: display proper information for client
Transaction::WaypointWriteSet(_write_set) => String::from("genesis"),
// TODO: display proper information for client
Transaction::BlockMetadata(_block_metadata) => String::from("block_metadata"),
}
}
}
impl TryFrom<Transaction> for SignedTransaction {
type Error = Error;
fn try_from(txn: Transaction) -> Result<Self> {
match txn {
Transaction::UserTransaction(txn) => Ok(txn),
_ => Err(format_err!("Not a user transaction.")),
}
}
} | |
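// Illustrative sketch (assumed value `txn` of type Transaction): only the UserTransaction
// variant can be converted back into a SignedTransaction; the other variants return an error.
//
//     match txn.as_signed_user_txn() {
//         Ok(signed) => println!("user txn from {}", signed.sender()),
//         Err(_) => println!("not a user transaction"),
//     }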
index.ts | import {exportModule} from '@clux/react-web';
import {ModuleHandlers, defaultRouteParams} from './model';
import Main from './views/Main';
| export default exportModule('photo', ModuleHandlers, defaultRouteParams, {main: () => Main}); | |
into_iter_ri.rs | use std::{iter::Step, ops::RangeInclusive};
#[derive(Debug)]
pub struct IntoIterRi<TValueType>
where | TValueType: PartialOrd, {
pub(super) range: RangeInclusive<TValueType>,
}
impl<TValueType> Iterator for IntoIterRi<TValueType>
where
TValueType: PartialOrd + Step,
{
type Item = TValueType;
fn next(&mut self) -> Option<Self::Item> { self.range.next() }
} | |
exec_test.go | package youtubedlwrapper
import (
"bytes"
"io"
"strings"
"testing"
)
type mockCommand struct {
complete bool
path string
args []string
stdin io.Reader
stdinData string
stdout io.Writer
stdoutData string
stderr io.Writer
stderrData string
startErr error
waitErr error
runErr error
}
func (mockCommand *mockCommand) Start() error {
return mockCommand.startErr
}
func (mockCommand *mockCommand) Run() error {
var err error
if mockCommand.stdin != nil {
_, err = mockCommand.stdin.Read([]byte(mockCommand.stdinData))
if err != nil {
return err
}
}
_, err = mockCommand.stdout.Write([]byte(mockCommand.stdoutData))
if err != nil {
return err
}
_, err = mockCommand.stderr.Write([]byte(mockCommand.stderrData))
if err != nil {
return err
}
return mockCommand.runErr
}
func (mockCommand *mockCommand) Wait() error {
return mockCommand.waitErr
}
func (mockCommand *mockCommand) Output() ([]byte, error) {
panic("implement me")
}
func (mockCommand *mockCommand) CombinedOutput() ([]byte, error) {
panic("implement me")
}
func (mockCommand *mockCommand) StdinPipe() (io.WriteCloser, error) {
panic("implement me")
}
func (mockCommand *mockCommand) StdoutPipe() (io.ReadCloser, error) {
panic("implement me")
}
func (mockCommand *mockCommand) StderrPipe() (io.ReadCloser, error) {
panic("implement me")
}
func (mockCommand *mockCommand) SetStdin(stdin io.Reader) {
mockCommand.stdin = stdin
}
func (mockCommand *mockCommand) SetStdout(stdout io.Writer) {
mockCommand.stdout = stdout
}
func (mockCommand *mockCommand) SetStderr(stderr io.Writer) {
mockCommand.stderr = stderr
}
func (mockCommand *mockCommand) String() string {
builder := new(strings.Builder)
builder.WriteString(mockCommand.path)
return builder.String()
}
func | (t *testing.T) {
t.Parallel()
tests := []struct {
name string
cmd mockCommand
expectedErr error
runBeforeFunc func(mockCommand *mockCommand)
}{
{
"std1",
mockCommand{
stdinData: "stdin1",
stdoutData: "stdout1",
stderrData: "stderr1",
startErr: nil,
waitErr: nil,
runErr: nil,
},
nil,
func(mockCommand *mockCommand) {
},
},
}
var err error
for _, test := range tests {
if test.runBeforeFunc != nil {
test.runBeforeFunc(&test.cmd)
}
var stdoutBuffer, stderrBuffer bytes.Buffer
test.cmd.SetStdout(&stdoutBuffer)
test.cmd.SetStderr(&stderrBuffer)
err = test.cmd.Run()
if err != test.expectedErr {
t.Errorf("test (%v), expected error (%v), got error (%v)\n", test.name, test.expectedErr, err)
}
stdout, err := io.ReadAll(&stdoutBuffer)
if err != nil {
t.Errorf("test (%v), got error (%v) while reading stdoutBuffer\n", test.name, err)
}
if stdoutString := string(stdout); stdoutString != test.cmd.stdoutData {
t.Errorf("test (%v), execCommand.stdout Reader returned stdout (%v), expected (%v)", test.name, stdoutString, test.cmd.stdoutData)
}
stderr, err := io.ReadAll(&stderrBuffer)
if err != nil {
t.Errorf("test (%v), got error (%v) while reading stderrBuffer\n", test.name, err)
}
if stderrString := string(stderr); stderrString != test.cmd.stderrData {
t.Errorf("test (%v), execCommand.stderr Reader returned stderr (%v), expected (%v)", test.name, stderrString, test.cmd.stderrData)
}
}
}
type commandMocker struct {
stdinData string
stdoutData string
stderrData string
startErr error
waitErr error
runErr error
}
func (commandMocker *commandMocker) makeMockCommand(name string, arg ...string) execCommand {
cmd := &mockCommand{
path: name,
args: arg,
stdinData: commandMocker.stdinData,
stdoutData: commandMocker.stdoutData,
stderrData: commandMocker.stderrData,
startErr: commandMocker.startErr,
waitErr: commandMocker.waitErr,
runErr: commandMocker.runErr,
}
return cmd
}
| TestYoutubeDLError_mockCommand_Run |
buttons.py | import tkinter as tk
from PIL import ImageTk, Image
from file_import import FileImport
class Buttons:
def __init__(self, parent, player):
self.player = player
self.parent = parent
#clean these up
unskip_img = ImageTk.PhotoImage(Image.open("assets/unskip.png").resize((25,25)))
unskip_btn = tk.Button(parent, image=unskip_img,
command=self.player.play_previous)
unskip_btn.image = unskip_img
unskip_btn.config(highlightbackground="#282828", highlightcolor="#282828",
bg='#282828', borderwidth=0)
unskip_btn.grid(column=0, row=0)
self.pause_img = ImageTk.PhotoImage(Image.open("assets/pause.png").resize((25,25)))
self.play_img = ImageTk.PhotoImage(Image.open("assets/play.png").resize((25,25)))
self.play_btn = tk.Button(parent, image=self.pause_img, borderwidth=0, bg='#282828',
command=self.pause_play)
self.play_btn.image = self.pause_img
self.play_btn.config(highlightbackground="#282828", highlightcolor="#282828",
bg='#282828', borderwidth=0)
self.play_btn.grid(column=1, row=0)
skip_img = ImageTk.PhotoImage(Image.open("assets/skip.png").resize((25,25)))
skip_btn = tk.Button(parent, image=skip_img,
command=self.player.play_next)
skip_btn.image = skip_img
skip_btn.config(highlightbackground="#282828", highlightcolor="#282828",
bg='#282828', borderwidth=0)
skip_btn.grid(column=2, row=0)
volume_down_img = ImageTk.PhotoImage(Image.open("assets/volume_down.png").resize((25,25)))
volume_up_btn = tk.Button(parent, image=volume_down_img,
command=self.player.volume_control)
volume_up_btn.image = volume_down_img
volume_up_btn.config(highlightbackground="#282828", highlightcolor="#282828",
bg='#282828', borderwidth=0)
volume_up_btn.grid(column=3, row=0)
volume_up_img = ImageTk.PhotoImage(Image.open("assets/volume_up.png").resize((25,25)))
volume_down_btn = tk.Button(parent, image=volume_up_img,
command=lambda: self.player.volume_control(True))
volume_down_btn.image = volume_up_img
volume_down_btn.config(highlightbackground="#282828", highlightcolor="#282828",
bg='#282828', borderwidth=0)
volume_down_btn.grid(column=4, row=0)
import_btn = tk.Button(parent, text="Import",
command=self.import_new_files)
import_btn.config(highlightbackground="#282828", highlightcolor="#282828",
bg='#282828', borderwidth=0)
import_btn.place(x=970, y=0)
def | (self):
importer = FileImport(self.parent, tk.Toplevel(self.parent))
def pause_play(self):
if self.player.is_playing:
self.play_btn.config(image=self.play_img)
else:
self.play_btn.config(image=self.pause_img)
self.player.pause_resume()
| import_new_files |
view.go | package view
import (
"fmt"
"net/http"
"sort"
"strings"
"time"
"github.com/AlecAivazis/survey/v2"
"github.com/cli/cli/internal/ghinstance"
"github.com/cli/cli/pkg/cmd/gist/shared"
"github.com/cli/cli/pkg/cmdutil"
"github.com/cli/cli/pkg/iostreams"
"github.com/cli/cli/pkg/markdown"
"github.com/cli/cli/pkg/prompt"
"github.com/cli/cli/pkg/text"
"github.com/cli/cli/utils"
"github.com/spf13/cobra"
)
type browser interface {
Browse(string) error
}
type ViewOptions struct {
IO *iostreams.IOStreams
HttpClient func() (*http.Client, error)
Browser browser
Selector string
Filename string
Raw bool
Web bool
ListFiles bool
}
func NewCmdView(f *cmdutil.Factory, runF func(*ViewOptions) error) *cobra.Command {
opts := &ViewOptions{
IO: f.IOStreams,
HttpClient: f.HttpClient,
Browser: f.Browser,
}
cmd := &cobra.Command{
Use: "view [<id> | <url>]",
Short: "View a gist",
Long: `View the given gist or select from recent gists.`,
Args: cobra.MaximumNArgs(1),
RunE: func(cmd *cobra.Command, args []string) error {
if len(args) == 1 {
opts.Selector = args[0]
}
if !opts.IO.IsStdoutTTY() {
opts.Raw = true
}
if runF != nil {
return runF(opts)
}
return viewRun(opts)
},
}
cmd.Flags().BoolVarP(&opts.Raw, "raw", "r", false, "Print raw instead of rendered gist contents")
cmd.Flags().BoolVarP(&opts.Web, "web", "w", false, "Open gist in the browser")
cmd.Flags().BoolVarP(&opts.ListFiles, "files", "", false, "List file names from the gist")
cmd.Flags().StringVarP(&opts.Filename, "filename", "f", "", "Display a single file from the gist")
return cmd
}
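// Illustrative usage sketch based on the flags registered above (the "gh gist" invocation is
// assumed from the command's Use string and surrounding package layout):
//
//     gh gist view                        # pick from recent gists interactively
//     gh gist view <id-or-url> --files    # list file names only
//     gh gist view <id-or-url> -f foo.md  # print a single file; add -r for raw output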
func viewRun(opts *ViewOptions) error {
gistID := opts.Selector
client, err := opts.HttpClient()
if err != nil {
return err
}
cs := opts.IO.ColorScheme()
if gistID == "" {
gistID, err = promptGists(client, cs)
if err != nil {
return err
}
if gistID == "" {
fmt.Fprintln(opts.IO.Out, "No gists found.")
return nil
}
}
if opts.Web {
gistURL := gistID
if !strings.Contains(gistURL, "/") {
hostname := ghinstance.OverridableDefault()
gistURL = ghinstance.GistPrefix(hostname) + gistID
}
if opts.IO.IsStderrTTY() {
fmt.Fprintf(opts.IO.ErrOut, "Opening %s in your browser.\n", utils.DisplayURL(gistURL))
}
return opts.Browser.Browse(gistURL)
}
if strings.Contains(gistID, "/") {
id, err := shared.GistIDFromURL(gistID)
if err != nil {
return err
}
gistID = id
}
gist, err := shared.GetGist(client, ghinstance.OverridableDefault(), gistID)
if err != nil {
return err
}
theme := opts.IO.DetectTerminalTheme()
markdownStyle := markdown.GetStyle(theme)
if err := opts.IO.StartPager(); err != nil {
fmt.Fprintf(opts.IO.ErrOut, "starting pager failed: %v\n", err)
}
defer opts.IO.StopPager()
render := func(gf *shared.GistFile) error {
if shared.IsBinaryContents([]byte(gf.Content)) {
if len(gist.Files) == 1 || opts.Filename != "" {
return fmt.Errorf("error: file is binary")
}
_, err = fmt.Fprintln(opts.IO.Out, cs.Gray("(skipping rendering binary content)"))
return nil
}
if strings.Contains(gf.Type, "markdown") && !opts.Raw {
rendered, err := markdown.Render(gf.Content, markdownStyle)
if err != nil {
return err
}
_, err = fmt.Fprint(opts.IO.Out, rendered)
return err
}
if _, err := fmt.Fprint(opts.IO.Out, gf.Content); err != nil {
return err
}
if !strings.HasSuffix(gf.Content, "\n") {
_, err := fmt.Fprint(opts.IO.Out, "\n")
return err
}
return nil
}
if opts.Filename != "" {
gistFile, ok := gist.Files[opts.Filename]
if !ok {
return fmt.Errorf("gist has no such file: %q", opts.Filename)
}
return render(gistFile)
}
if gist.Description != "" && !opts.ListFiles {
fmt.Fprintf(opts.IO.Out, "%s\n\n", cs.Bold(gist.Description))
}
showFilenames := len(gist.Files) > 1
filenames := make([]string, 0, len(gist.Files))
for fn := range gist.Files {
filenames = append(filenames, fn)
}
sort.Slice(filenames, func(i, j int) bool {
return strings.ToLower(filenames[i]) < strings.ToLower(filenames[j])
})
if opts.ListFiles {
for _, fn := range filenames {
fmt.Fprintln(opts.IO.Out, fn)
}
return nil
}
for i, fn := range filenames {
if showFilenames {
fmt.Fprintf(opts.IO.Out, "%s\n\n", cs.Gray(fn))
}
if err := render(gist.Files[fn]); err != nil {
return err
}
if i < len(filenames)-1 {
fmt.Fprint(opts.IO.Out, "\n")
}
}
return nil
}
func promptGists(client *http.Client, cs *iostreams.ColorScheme) (gistID string, err error) {
gists, err := shared.ListGists(client, ghinstance.OverridableDefault(), 10, "all")
if err != nil {
return "", err
}
if len(gists) == 0 {
return "", nil
}
var opts []string
var result int
var gistIDs = make([]string, len(gists))
for i, gist := range gists {
gistIDs[i] = gist.ID
description := ""
gistName := ""
if gist.Description != "" {
description = gist.Description
}
filenames := make([]string, 0, len(gist.Files))
for fn := range gist.Files {
filenames = append(filenames, fn)
}
sort.Strings(filenames)
gistName = filenames[0]
gistTime := utils.FuzzyAgo(time.Since(gist.UpdatedAt))
// TODO: support dynamic maxWidth
description = text.Truncate(100, text.ReplaceExcessiveWhitespace(description))
opt := fmt.Sprintf("%s %s %s", cs.Bold(gistName), description, cs.Gray(gistTime))
opts = append(opts, opt)
}
questions := &survey.Select{
Message: "Select a gist",
Options: opts,
}
err = prompt.SurveyAskOne(questions, &result)
if err != nil |
return gistIDs[result], nil
}
| {
return "", err
} |
github_test.go | package github
import (
"context"
"errors"
"fmt"
"io/ioutil"
"net/http"
"testing"
"time"
githubv3 "github.com/google/go-github/v37/github"
"github.com/shurcooL/githubv4"
"github.com/stretchr/testify/assert"
githubv1 "github.com/lyft/clutch/backend/api/sourcecontrol/github/v1"
sourcecontrolv1 "github.com/lyft/clutch/backend/api/sourcecontrol/v1"
)
const problem = "we've had a problem"
var (
timestamp = time.Unix(1569010072, 0)
)
type getfileMock struct {
v4client
queryError bool
refID, objID string
truncated, binary bool
}
func (g *getfileMock) Query(ctx context.Context, query interface{}, variables map[string]interface{}) error {
q, ok := query.(*getFileQuery)
if !ok {
panic("not a query")
}
if g.queryError {
return errors.New(problem)
}
if g.refID != "" {
q.Repository.Ref.Commit.ID = g.refID
q.Repository.Ref.Commit.OID = githubv4.GitObjectID(g.refID)
}
if g.objID != "" {
q.Repository.Object.Blob.OID = githubv4.GitObjectID(g.objID)
q.Repository.Object.Blob.ID = g.objID
q.Repository.Object.Blob.Text = "text"
}
q.Repository.Object.Blob.IsTruncated = githubv4.Boolean(g.truncated)
q.Repository.Object.Blob.IsBinary = githubv4.Boolean(g.binary)
q.Repository.Ref.Commit.History.Nodes = append(
q.Repository.Ref.Commit.History.Nodes,
struct {
CommittedDate githubv4.DateTime
OID githubv4.GitObjectID
}{githubv4.DateTime{Time: timestamp}, "otherSHA"},
)
return nil
}
var getFileTests = []struct {
name string
v4 getfileMock
errText string
}{
{
name: "queryError",
v4: getfileMock{queryError: true},
errText: problem,
},
{
name: "noRef",
v4: getfileMock{},
errText: "ref not found",
},
{
name: "noObject",
v4: getfileMock{refID: "abcdef12345"},
errText: "object not found",
},
{
name: "wasTruncated",
v4: getfileMock{refID: "abcdef12345", objID: "abcdef12345", truncated: true},
errText: "truncated",
},
{
name: "wasBinary",
v4: getfileMock{refID: "abcdef12345", objID: "abcdef12345", binary: true},
errText: "binary",
},
{
name: "happyPath",
v4: getfileMock{refID: "abcdef12345", objID: "abcdef12345"},
},
}
func TestGetFile(t *testing.T) {
for _, tt := range getFileTests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
a := assert.New(t)
s := &svc{graphQL: &tt.v4}
f, err := s.GetFile(context.Background(),
&RemoteRef{
RepoOwner: "owner",
RepoName: "myRepo",
Ref: "master",
},
"data/foo",
)
if tt.errText != "" {
a.Error(err)
a.Contains(err.Error(), tt.errText)
return
}
if err != nil {
a.FailNow("unexpected error")
return
}
contents, _ := ioutil.ReadAll(f.Contents)
a.Equal("text", string(contents))
a.Equal("data/foo", f.Path)
a.Equal("abcdef12345", f.SHA)
a.Equal("otherSHA", f.LastModifiedSHA)
a.Equal(timestamp, f.LastModifiedTime)
})
}
}
type mockRepositories struct {
actualOrg string
actualRepo *githubv3.Repository
generalError bool
malformedEncodingError bool
}
func (m *mockRepositories) Create(ctx context.Context, org string, repo *githubv3.Repository) (*githubv3.Repository, *githubv3.Response, error) {
m.actualRepo = repo
m.actualOrg = org
ret := &githubv3.Repository{
HTMLURL: strPtr(fmt.Sprintf("https://example.com/%s/%s", org, *repo.Name)),
}
return ret, nil, nil
}
func (m *mockRepositories) GetContents(_ context.Context, _, _, _ string, _ *githubv3.RepositoryContentGetOptions) (*githubv3.RepositoryContent, []*githubv3.RepositoryContent, *githubv3.Response, error) {
if m.generalError == true {
return nil, nil, nil, errors.New(problem)
}
if m.malformedEncodingError {
encoding := "unsupported"
return &githubv3.RepositoryContent{Encoding: &encoding}, nil, nil, nil
}
return &githubv3.RepositoryContent{}, nil, nil, nil
}
func (m *mockRepositories) CompareCommits(ctx context.Context, owner, repo, base, head string) (*githubv3.CommitsComparison, *githubv3.Response, error) {
if m.generalError {
return nil, nil, errors.New(problem)
}
returnstr := "behind"
shaStr := "astdfsaohecra"
return &githubv3.CommitsComparison{Status: &returnstr,
Commits: []*githubv3.RepositoryCommit{
{SHA: &shaStr},
}}, nil, nil
}
func (m *mockRepositories) GetCommit(ctx context.Context, owner, repo, sha string) (*githubv3.RepositoryCommit, *githubv3.Response, error) {
file := "testfile.go"
message := "committing some changes (#1)"
authorLogin := "foobar"
if m.generalError {
return &githubv3.RepositoryCommit{}, &githubv3.Response{}, errors.New(problem)
}
return &githubv3.RepositoryCommit{
Files: []*githubv3.CommitFile{
{
Filename: &file,
},
},
Commit: &githubv3.Commit{
Message: &message,
Author: &githubv3.CommitAuthor{
Login: &authorLogin,
},
},
}, nil, nil
}
type mockUsers struct {
user githubv3.User
defaultUser string
}
func (m *mockUsers) Get(ctx context.Context, user string) (*githubv3.User, *githubv3.Response, error) {
var login string
if login = user; user == "" {
login = m.defaultUser
}
ret := &githubv3.User{
Login: &login,
}
m.user = *ret
return ret, nil, nil
}
var createRepoTests = []struct {
req *sourcecontrolv1.CreateRepositoryRequest
users *mockUsers
}{
{
req: &sourcecontrolv1.CreateRepositoryRequest{
Owner: "organization",
Name: "bar",
Description: "this is an org repository",
Options: &sourcecontrolv1.CreateRepositoryRequest_GithubOptions{GithubOptions: &githubv1.CreateRepositoryOptions{
Parameters: &githubv1.RepositoryParameters{Visibility: githubv1.RepositoryParameters_PUBLIC},
}},
},
users: &mockUsers{},
},
{
req: &sourcecontrolv1.CreateRepositoryRequest{
Owner: "user",
Name: "bar",
Description: "this is my repository",
Options: &sourcecontrolv1.CreateRepositoryRequest_GithubOptions{GithubOptions: &githubv1.CreateRepositoryOptions{
Parameters: &githubv1.RepositoryParameters{Visibility: githubv1.RepositoryParameters_PRIVATE},
}},
},
users: &mockUsers{
defaultUser: "user",
},
},
}
func TestCreateRepository(t *testing.T) {
for idx, tt := range createRepoTests {
tt := tt
t.Run(fmt.Sprintf("%d", idx), func(t *testing.T) {
t.Parallel()
r := &mockRepositories{}
s := &svc{rest: v3client{
Repositories: r,
Users: tt.users,
}}
resp, err := s.CreateRepository(context.Background(), tt.req)
var expectedOwner string
if expectedOwner = tt.req.Owner; tt.req.Owner == "user" {
expectedOwner = ""
}
var expectedPrivate bool
switch tt.req.GetGithubOptions().Parameters.Visibility {
case githubv1.RepositoryParameters_PUBLIC:
expectedPrivate = false
case githubv1.RepositoryParameters_PRIVATE:
expectedPrivate = true
}
assert.NoError(t, err)
assert.Equal(t, expectedOwner, r.actualOrg)
assert.Equal(t, tt.req.Name, *r.actualRepo.Name)
assert.Equal(t, expectedPrivate, *r.actualRepo.Private)
assert.Equal(t, tt.req.Description, *r.actualRepo.Description)
assert.NotEmpty(t, resp.Url)
})
}
}
var getUserTests = []struct {
username string
}{
{
username: "foobar",
},
}
func TestGetUser(t *testing.T) {
for idx, tt := range getUserTests {
tt := tt
t.Run(fmt.Sprintf("%d", idx), func(t *testing.T) {
t.Parallel()
u := &mockUsers{}
s := &svc{rest: v3client{
Users: u,
}}
resp, err := s.GetUser(context.Background(), tt.username)
assert.NoError(t, err)
assert.Equal(t, u.user.GetLogin(), resp.GetLogin())
})
}
}
var compareCommitsTests = []struct {
name string
errorText string
status string
generalError bool
mockRepo *mockRepositories
}{
{
name: "v3 error",
generalError: true,
errorText: "Could not get comparison",
mockRepo: &mockRepositories{generalError: true},
},
{
name: "happy path",
status: "behind",
mockRepo: &mockRepositories{},
},
}
func TestCompareCommits(t *testing.T) {
t.Parallel()
for _, tt := range compareCommitsTests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
a := assert.New(t)
s := &svc{rest: v3client{
Repositories: tt.mockRepo,
}}
comp, err := s.CompareCommits(
context.Background(),
&RemoteRef{
RepoOwner: "owner",
RepoName: "myRepo",
Ref: "master",
},
"1234",
)
if tt.errorText != "" {
a.Error(err)
a.Contains(err.Error(), tt.errorText)
return
}
if err != nil {
a.FailNowf("unexpected error: %s", err.Error())
return
}
a.Equal(comp.GetStatus(), tt.status)
a.NotNil(comp.Commits)
a.Nil(err)
})
}
}
var getCommitsTests = []struct {
name string
errorText string
mockRepo *mockRepositories
file string
message string
authorLogin string
authorAvatarURL string
authorID int64
parentRef string
}{
{
name: "v3 error",
mockRepo: &mockRepositories{generalError: true},
errorText: "we've had a problem",
},
{
name: "happy path",
mockRepo: &mockRepositories{},
file: "testfile.go",
message: "committing some changes (#1)",
authorAvatarURL: "https://foo.bar/baz.png",
authorID: 1234,
parentRef: "test",
},
}
func TestGetCommit(t *testing.T) {
t.Parallel()
for _, tt := range getCommitsTests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
a := assert.New(t)
s := &svc{rest: v3client{
Repositories: tt.mockRepo,
}}
commit, err := s.GetCommit(context.Background(),
&RemoteRef{
RepoOwner: "owner",
RepoName: "myRepo",
Ref: "1234",
},
)
if tt.errorText != "" {
a.Error(err)
a.Contains(err.Error(), tt.errorText)
return
}
if err != nil {
a.FailNowf("unexpected error: %s", err.Error())
return
}
a.Equal(tt.file, *commit.Files[0].Filename)
a.Equal(tt.message, commit.Message)
if commit.Author != nil {
a.Equal(tt.authorAvatarURL, *commit.Author.AvatarURL)
a.Equal(tt.authorID, *commit.Author.ID)
}
if commit.ParentRef != "" {
a.Equal(tt.parentRef, commit.ParentRef)
}
a.Nil(err)
})
}
}
type mockOrganizations struct {
actualOrg string
actualUser string
generalError bool
authError bool
}
func (m *mockOrganizations) Get(ctx context.Context, org string) (*githubv3.Organization, *githubv3.Response, error) {
m.actualOrg = org
if m.generalError == true {
return nil, nil, errors.New(problem)
}
return &githubv3.Organization{
Name: &org,
}, nil, nil
}
func (m *mockOrganizations) List(ctx context.Context, user string, opts *githubv3.ListOptions) ([]*githubv3.Organization, *githubv3.Response, error) {
m.actualUser = user
if m.generalError == true {
return nil, nil, errors.New(problem)
}
return []*githubv3.Organization{}, nil, nil
}
func (m *mockOrganizations) GetOrgMembership(ctx context.Context, user, org string) (*githubv3.Membership, *githubv3.Response, error) {
m.actualOrg = org
m.actualUser = user
if m.generalError {
return nil, &githubv3.Response{Response: &http.Response{StatusCode: 500}}, errors.New(problem)
}
if m.authError {
return nil, &githubv3.Response{Response: &http.Response{StatusCode: 403}}, nil
}
return &githubv3.Membership{}, nil, nil
}
var getOrganizationTests = []struct {
name string
errorText string
mockOrgs *mockOrganizations
org string
}{
{
name: "v3 error",
mockOrgs: &mockOrganizations{generalError: true},
errorText: "we've had a problem",
org: "testing",
},
{
name: "v3 error",
mockOrgs: &mockOrganizations{authError: true},
org: "testing",
},
{
name: "happy path",
mockOrgs: &mockOrganizations{},
org: "testing",
},
}
func TestGetOrganization(t *testing.T) {
for idx, tt := range getOrganizationTests {
tt := tt
t.Run(fmt.Sprintf("%d", idx), func(t *testing.T) {
t.Parallel()
s := &svc{rest: v3client{
Organizations: tt.mockOrgs,
}}
resp, err := s.GetOrganization(context.Background(), tt.org)
if tt.errorText != "" {
assert.Error(t, err)
assert.Contains(t, err.Error(), tt.errorText)
} else {
assert.NoError(t, err)
assert.Equal(t, resp.GetName(), tt.org)
assert.Equal(t, tt.mockOrgs.actualOrg, tt.org)
}
})
}
}
var listOrganizationsTests = []struct {
name string
errorText string
mockOrgs *mockOrganizations
username string
}{
{
name: "v3 error",
mockOrgs: &mockOrganizations{generalError: true},
errorText: "we've had a problem",
username: "foobar",
},
{
name: "happy path",
mockOrgs: &mockOrganizations{},
username: "foobar",
},
}
func TestListOrganizations(t *testing.T) {
for idx, tt := range listOrganizationsTests {
tt := tt
t.Run(fmt.Sprintf("%d", idx), func(t *testing.T) {
t.Parallel()
s := &svc{rest: v3client{
Organizations: tt.mockOrgs,
}}
resp, err := s.ListOrganizations(context.Background(), tt.username)
if tt.errorText != "" {
assert.Error(t, err)
assert.Contains(t, err.Error(), tt.errorText)
} else {
assert.NoError(t, err)
assert.Equal(t, len(resp), 0)
assert.Equal(t, tt.mockOrgs.actualUser, tt.username)
}
})
}
}
var getOrgMembershipTests = []struct {
name string
errorText string
mockOrgs *mockOrganizations
username string
org string
}{
{
name: "v3 error",
mockOrgs: &mockOrganizations{generalError: true},
errorText: "we've had a problem",
username: "foobar",
org: "testing",
},
{
name: "happy path",
mockOrgs: &mockOrganizations{},
username: "foobar",
org: "testing",
},
}
func TestGetOrgMembership(t *testing.T) {
for idx, tt := range getOrgMembershipTests {
tt := tt
t.Run(fmt.Sprintf("%d", idx), func(t *testing.T) {
t.Parallel()
s := &svc{rest: v3client{
Organizations: tt.mockOrgs,
}}
_, err := s.GetOrgMembership(context.Background(), tt.username, tt.org)
if tt.errorText != "" {
assert.Error(t, err)
assert.Contains(t, err.Error(), tt.errorText)
} else {
assert.NoError(t, err)
assert.Equal(t, tt.mockOrgs.actualOrg, tt.org)
assert.Equal(t, tt.mockOrgs.actualUser, tt.username)
}
})
}
}
type getRepositoryMock struct {
v4client
branchName string
}
func (g *getRepositoryMock) Query(ctx context.Context, query interface{}, variables map[string]interface{}) error {
q, ok := query.(*getRepositoryQuery)
if !ok {
panic("not a query")
}
q.Repository.DefaultBranchRef.Name = g.branchName
return nil
}
var getDefaultBranchTests = []struct {
name string
v4 getRepositoryMock
wantDefaultBranch string
}{
{
name: "1. default repo with main branch",
v4: getRepositoryMock{branchName: "main"},
wantDefaultBranch: "main",
},
{
name: "2. default repo with master branch",
v4: getRepositoryMock{branchName: "master"},
wantDefaultBranch: "master",
},
}
func | (t *testing.T) {
t.Parallel()
for _, tt := range getDefaultBranchTests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
a := assert.New(t)
s := &svc{graphQL: &tt.v4}
repo, err := s.GetRepository(context.Background(),
&RemoteRef{
RepoOwner: "owner",
RepoName: "myRepo",
},
)
if err != nil {
a.FailNowf("unexpected error: %s", err.Error())
return
}
gotDefaultBranch := repo.DefaultBranch
a.Equal(gotDefaultBranch, tt.wantDefaultBranch)
a.Nil(err)
})
}
}
| TestGetRepository |
app.ts | import { Store } from 'vuex'
import RootState from '@vue-storefront/core/types/RootState'
import Vue from 'vue'
import buildTimeConfig from 'config'
import { isServer } from '@vue-storefront/core/helpers'
import { Logger } from '@vue-storefront/core/lib/logger'
// Plugins
import i18n from '@vue-storefront/i18n'
import VueRouter from 'vue-router'
import VueLazyload from 'vue-lazyload'
import Vuelidate from 'vuelidate'
import Meta from 'vue-meta'
import { sync } from 'vuex-router-sync'
import VueObserveVisibility from 'vue-observe-visibility'
// Apollo GraphQL client
import { getApolloProvider } from './scripts/resolvers/resolveGraphQL'
// TODO simplify by removing global mixins, plugins and filters - it can be done in normal 'vue' way
import { registerTheme } from '@vue-storefront/core/lib/themes'
import { themeEntry } from 'theme/index.js'
import { registerModules } from '@vue-storefront/core/lib/module'
import { prepareStoreView } from '@vue-storefront/core/lib/multistore'
import * as coreMixins from '@vue-storefront/core/mixins'
import * as coreFilters from '@vue-storefront/core/filters'
import * as corePlugins from '@vue-storefront/core/compatibility/plugins'
import { once } from '@vue-storefront/core/helpers'
import { takeOverConsole } from '@vue-storefront/core/helpers/log'
import store from '@vue-storefront/core/store'
import { enabledModules } from './modules-entry'
// Will be deprecated in 1.8
import { registerExtensions } from '@vue-storefront/core/compatibility/lib/extensions'
import { registerExtensions as extensions } from 'src/extensions'
function createRouter (): VueRouter {
return new VueRouter({
mode: 'history',
base: __dirname,
scrollBehavior: (to, from, savedPosition) => {
if (to.hash) {
return {
selector: to.hash
}
}
if (savedPosition) {
return savedPosition
} else {
return {x: 0, y: 0}
}
}
})
}
let router: VueRouter = null
Vue.use(VueRouter)
const createApp = async (ssrContext, config): Promise<{app: Vue, router: VueRouter, store: Store<RootState>}> => {
router = createRouter()
// sync router with vuex 'router' store
sync(store, router)
// TODO: Don't mutate the state directly, use mutation instead
store.state.version = '1.8.3'
store.state.config = config
store.state.__DEMO_MODE__ = (config.demomode === true) ? true : false
if (ssrContext) Vue.prototype.$ssrRequestContext = ssrContext
if (!store.state.config) store.state.config = buildTimeConfig // if provided from SSR, don't replace it
const storeView = prepareStoreView(null) // prepare the default storeView
store.state.storeView = storeView
// store.state.shipping.methods = shippingMethods
Vue.use(Vuelidate)
Vue.use(VueLazyload, {attempt: 2, preLoad: 1.5})
Vue.use(Meta)
Vue.use(VueObserveVisibility)
// to depreciate in near future
once('__VUE_EXTEND__', () => {
Object.keys(corePlugins).forEach(key => {
Vue.use(corePlugins[key])
})
Object.keys(coreMixins).forEach(key => {
Vue.mixin(coreMixins[key])
})
})
Object.keys(coreFilters).forEach(key => {
Vue.filter(key, coreFilters[key])
})
let vueOptions = {
router,
store,
i18n,
render: h => h(themeEntry)
}
const apolloProvider = await getApolloProvider()
if (apolloProvider) Object.assign(vueOptions, {provider: apolloProvider})
const app = new Vue(vueOptions)
const appContext = {
isServer,
ssrContext
}
registerModules(enabledModules, appContext)
registerExtensions(extensions, app, router, store, config, ssrContext)
registerTheme(buildTimeConfig.theme, app, router, store, store.state.config, ssrContext)
app.$emit('application-after-init', app) |
export { router, createApp } |
return { app, router, store }
} |
android-project-service.ts | import * as path from "path";
import * as shell from "shelljs";
import * as constants from "../constants";
import * as semver from "semver";
import * as projectServiceBaseLib from "./platform-project-service-base";
import { DeviceAndroidDebugBridge } from "../common/mobile/android/device-android-debug-bridge";
import { Configurations, LiveSyncPaths } from "../common/constants";
import { hook } from "../common/helpers";
import { performanceLog } from ".././common/decorators";
export class | extends projectServiceBaseLib.PlatformProjectServiceBase {
private static VALUES_DIRNAME = "values";
private static VALUES_VERSION_DIRNAME_PREFIX = AndroidProjectService.VALUES_DIRNAME + "-v";
private static ANDROID_PLATFORM_NAME = "android";
private static MIN_RUNTIME_VERSION_WITH_GRADLE = "1.5.0";
constructor(private $androidToolsInfo: IAndroidToolsInfo,
private $errors: IErrors,
$fs: IFileSystem,
private $logger: ILogger,
$projectDataService: IProjectDataService,
private $injector: IInjector,
private $devicePlatformsConstants: Mobile.IDevicePlatformsConstants,
private $androidPluginBuildService: IAndroidPluginBuildService,
private $platformEnvironmentRequirements: IPlatformEnvironmentRequirements,
private $androidResourcesMigrationService: IAndroidResourcesMigrationService,
private $filesHashService: IFilesHashService,
private $gradleCommandService: IGradleCommandService,
private $gradleBuildService: IGradleBuildService,
private $analyticsService: IAnalyticsService) {
super($fs, $projectDataService);
}
private _platformData: IPlatformData = null;
public getPlatformData(projectData: IProjectData): IPlatformData {
if (!projectData && !this._platformData) {
throw new Error("First call of getPlatformData without providing projectData.");
}
if (projectData && projectData.platformsDir) {
const projectRoot = path.join(projectData.platformsDir, AndroidProjectService.ANDROID_PLATFORM_NAME);
const appDestinationDirectoryArr = [projectRoot, constants.APP_FOLDER_NAME, constants.SRC_DIR, constants.MAIN_DIR, constants.ASSETS_DIR];
const configurationsDirectoryArr = [projectRoot, constants.APP_FOLDER_NAME, constants.SRC_DIR, constants.MAIN_DIR, constants.MANIFEST_FILE_NAME];
const deviceBuildOutputArr = [projectRoot, constants.APP_FOLDER_NAME, constants.BUILD_DIR, constants.OUTPUTS_DIR, constants.APK_DIR];
const packageName = this.getProjectNameFromId(projectData);
this._platformData = {
frameworkPackageName: constants.TNS_ANDROID_RUNTIME_NAME,
normalizedPlatformName: "Android",
platformNameLowerCase: "android",
appDestinationDirectoryPath: path.join(...appDestinationDirectoryArr),
platformProjectService: <any>this,
projectRoot: projectRoot,
getBuildOutputPath: (buildOptions: IBuildOutputOptions) => {
if (buildOptions.androidBundle) {
return path.join(projectRoot, constants.APP_FOLDER_NAME, constants.BUILD_DIR, constants.OUTPUTS_DIR, constants.BUNDLE_DIR);
}
return path.join(...deviceBuildOutputArr);
},
getValidBuildOutputData: (buildOptions: IBuildOutputOptions): IValidBuildOutputData => {
const buildMode = buildOptions.release ? Configurations.Release.toLowerCase() : Configurations.Debug.toLowerCase();
if (buildOptions.androidBundle) {
return {
packageNames: [
`${constants.APP_FOLDER_NAME}${constants.AAB_EXTENSION_NAME}`,
`${constants.APP_FOLDER_NAME}-${buildMode}${constants.AAB_EXTENSION_NAME}`
]
};
}
return {
packageNames: [
`${packageName}-${buildMode}${constants.APK_EXTENSION_NAME}`,
`${projectData.projectName}-${buildMode}${constants.APK_EXTENSION_NAME}`,
`${projectData.projectName}${constants.APK_EXTENSION_NAME}`,
`${constants.APP_FOLDER_NAME}-${buildMode}${constants.APK_EXTENSION_NAME}`
],
regexes: [new RegExp(`${constants.APP_FOLDER_NAME}-.*-(${Configurations.Debug}|${Configurations.Release})${constants.APK_EXTENSION_NAME}`, "i"), new RegExp(`${packageName}-.*-(${Configurations.Debug}|${Configurations.Release})${constants.APK_EXTENSION_NAME}`, "i")]
};
},
configurationFileName: constants.MANIFEST_FILE_NAME,
configurationFilePath: path.join(...configurationsDirectoryArr),
relativeToFrameworkConfigurationFilePath: path.join(constants.SRC_DIR, constants.MAIN_DIR, constants.MANIFEST_FILE_NAME),
fastLivesyncFileExtensions: [".jpg", ".gif", ".png", ".bmp", ".webp"] // http://developer.android.com/guide/appendix/media-formats.html
};
}
return this._platformData;
}
public getCurrentPlatformVersion(platformData: IPlatformData, projectData: IProjectData): string {
const currentPlatformData: IDictionary<any> = this.$projectDataService.getNSValue(projectData.projectDir, platformData.frameworkPackageName);
return currentPlatformData && currentPlatformData[constants.VERSION_STRING];
}
public async validateOptions(): Promise<boolean> {
return true;
}
public getAppResourcesDestinationDirectoryPath(projectData: IProjectData): string {
const appResourcesDirStructureHasMigrated = this.$androidResourcesMigrationService.hasMigrated(projectData.getAppResourcesDirectoryPath());
if (appResourcesDirStructureHasMigrated) {
return this.getUpdatedAppResourcesDestinationDirPath(projectData);
} else {
return this.getLegacyAppResourcesDestinationDirPath(projectData);
}
}
public async validate(projectData: IProjectData, options: IOptions, notConfiguredEnvOptions?: INotConfiguredEnvOptions): Promise<IValidatePlatformOutput> {
this.validatePackageName(projectData.projectIdentifiers.android);
this.validateProjectName(projectData.projectName);
const checkEnvironmentRequirementsOutput = await this.$platformEnvironmentRequirements.checkEnvironmentRequirements({
platform: this.getPlatformData(projectData).normalizedPlatformName,
projectDir: projectData.projectDir,
options,
notConfiguredEnvOptions
});
this.$androidToolsInfo.validateTargetSdk({ showWarningsAsErrors: true, projectDir: projectData.projectDir });
return {
checkEnvironmentRequirementsOutput
};
}
public async createProject(frameworkDir: string, frameworkVersion: string, projectData: IProjectData): Promise<void> {
if (semver.lt(frameworkVersion, AndroidProjectService.MIN_RUNTIME_VERSION_WITH_GRADLE)) {
this.$errors.fail(`The NativeScript CLI requires Android runtime ${AndroidProjectService.MIN_RUNTIME_VERSION_WITH_GRADLE} or later to work properly.`);
}
this.$fs.ensureDirectoryExists(this.getPlatformData(projectData).projectRoot);
const androidToolsInfo = this.$androidToolsInfo.getToolsInfo({ projectDir: projectData.projectDir });
const targetSdkVersion = androidToolsInfo && androidToolsInfo.targetSdkVersion;
this.$logger.trace(`Using Android SDK '${targetSdkVersion}'.`);
this.copy(this.getPlatformData(projectData).projectRoot, frameworkDir, "*", "-R");
// TODO: Check if we actually need this and if it should be targetSdk or compileSdk
this.cleanResValues(targetSdkVersion, projectData);
}
private getResDestinationDir(projectData: IProjectData): string {
const appResourcesDirStructureHasMigrated = this.$androidResourcesMigrationService.hasMigrated(projectData.getAppResourcesDirectoryPath());
if (appResourcesDirStructureHasMigrated) {
const appResourcesDestinationPath = this.getUpdatedAppResourcesDestinationDirPath(projectData);
return path.join(appResourcesDestinationPath, constants.MAIN_DIR, constants.RESOURCES_DIR);
} else {
return this.getLegacyAppResourcesDestinationDirPath(projectData);
}
}
private cleanResValues(targetSdkVersion: number, projectData: IProjectData): void {
const resDestinationDir = this.getResDestinationDir(projectData);
const directoriesInResFolder = this.$fs.readDirectory(resDestinationDir);
const directoriesToClean = directoriesInResFolder
.map(dir => {
return {
dirName: dir,
sdkNum: parseInt(dir.substr(AndroidProjectService.VALUES_VERSION_DIRNAME_PREFIX.length))
};
})
.filter(dir => dir.dirName.match(AndroidProjectService.VALUES_VERSION_DIRNAME_PREFIX)
&& dir.sdkNum
&& (!targetSdkVersion || (targetSdkVersion < dir.sdkNum)))
.map(dir => path.join(resDestinationDir, dir.dirName));
this.$logger.trace("Directories to clean:");
this.$logger.trace(directoriesToClean);
_.map(directoriesToClean, dir => this.$fs.deleteDirectory(dir));
}
public async interpolateData(projectData: IProjectData): Promise<void> {
		// Interpolate the API level and package
this.interpolateConfigurationFile(projectData);
const appResourcesDirectoryPath = projectData.getAppResourcesDirectoryPath();
let stringsFilePath: string;
const appResourcesDestinationDirectoryPath = this.getAppResourcesDestinationDirectoryPath(projectData);
if (this.$androidResourcesMigrationService.hasMigrated(appResourcesDirectoryPath)) {
stringsFilePath = path.join(appResourcesDestinationDirectoryPath, constants.MAIN_DIR, constants.RESOURCES_DIR, 'values', 'strings.xml');
} else {
stringsFilePath = path.join(appResourcesDestinationDirectoryPath, 'values', 'strings.xml');
}
shell.sed('-i', /__NAME__/, projectData.projectName, stringsFilePath);
shell.sed('-i', /__TITLE_ACTIVITY__/, projectData.projectName, stringsFilePath);
const gradleSettingsFilePath = path.join(this.getPlatformData(projectData).projectRoot, "settings.gradle");
shell.sed('-i', /__PROJECT_NAME__/, this.getProjectNameFromId(projectData), gradleSettingsFilePath);
try {
// will replace applicationId in app/App_Resources/Android/app.gradle if it has not been edited by the user
const appGradleContent = this.$fs.readText(projectData.appGradlePath);
if (appGradleContent.indexOf(constants.PACKAGE_PLACEHOLDER_NAME) !== -1) {
				//TODO: For compatibility with old templates. Once all templates are updated, this should be deleted.
shell.sed('-i', new RegExp(constants.PACKAGE_PLACEHOLDER_NAME), projectData.projectIdentifiers.android, projectData.appGradlePath);
}
} catch (e) {
this.$logger.trace(`Templates updated and no need for replace in app.gradle.`);
}
}
public interpolateConfigurationFile(projectData: IProjectData): void {
const manifestPath = this.getPlatformData(projectData).configurationFilePath;
shell.sed('-i', /__PACKAGE__/, projectData.projectIdentifiers.android, manifestPath);
}
private getProjectNameFromId(projectData: IProjectData): string {
let id: string;
if (projectData && projectData.projectIdentifiers && projectData.projectIdentifiers.android) {
const idParts = projectData.projectIdentifiers.android.split(".");
id = idParts[idParts.length - 1];
}
return id;
}
public afterCreateProject(projectRoot: string): void {
return null;
}
public async updatePlatform(currentVersion: string, newVersion: string, canUpdate: boolean, projectData: IProjectData, addPlatform?: Function, removePlatforms?: (platforms: string[]) => Promise<void>): Promise<boolean> {
if (semver.eq(newVersion, AndroidProjectService.MIN_RUNTIME_VERSION_WITH_GRADLE)) {
const platformLowercase = this.getPlatformData(projectData).normalizedPlatformName.toLowerCase();
await removePlatforms([platformLowercase.split("@")[0]]);
await addPlatform(platformLowercase);
return false;
}
return true;
}
@performanceLog()
@hook('buildAndroid')
public async buildProject(projectRoot: string, projectData: IProjectData, buildData: IAndroidBuildData): Promise<void> {
const platformData = this.getPlatformData(projectData);
await this.$gradleBuildService.buildProject(platformData.projectRoot, buildData);
const outputPath = platformData.getBuildOutputPath(buildData);
await this.$filesHashService.saveHashesForProject(this._platformData, outputPath);
await this.trackKotlinUsage(projectRoot);
}
public async buildForDeploy(projectRoot: string, projectData: IProjectData, buildData?: IAndroidBuildData): Promise<void> {
return this.buildProject(projectRoot, projectData, buildData);
}
public isPlatformPrepared(projectRoot: string, projectData: IProjectData): boolean {
return this.$fs.exists(path.join(this.getPlatformData(projectData).appDestinationDirectoryPath, constants.APP_FOLDER_NAME));
}
public getFrameworkFilesExtensions(): string[] {
return [".jar", ".dat"];
}
public async prepareProject(): Promise<void> {
// Intentionally left empty.
}
public ensureConfigurationFileInAppResources(projectData: IProjectData): void {
const appResourcesDirectoryPath = projectData.appResourcesDirectoryPath;
const appResourcesDirStructureHasMigrated = this.$androidResourcesMigrationService.hasMigrated(appResourcesDirectoryPath);
let originalAndroidManifestFilePath;
if (appResourcesDirStructureHasMigrated) {
originalAndroidManifestFilePath = path.join(appResourcesDirectoryPath, this.$devicePlatformsConstants.Android, "src", "main", this.getPlatformData(projectData).configurationFileName);
} else {
originalAndroidManifestFilePath = path.join(appResourcesDirectoryPath, this.$devicePlatformsConstants.Android, this.getPlatformData(projectData).configurationFileName);
}
const manifestExists = this.$fs.exists(originalAndroidManifestFilePath);
if (!manifestExists) {
this.$logger.warn('No manifest found in ' + originalAndroidManifestFilePath);
return;
}
// Overwrite the AndroidManifest from runtime.
if (!appResourcesDirStructureHasMigrated) {
this.$fs.copyFile(originalAndroidManifestFilePath, this.getPlatformData(projectData).configurationFilePath);
}
}
public prepareAppResources(projectData: IProjectData): void {
const platformData = this.getPlatformData(projectData);
const projectAppResourcesPath = projectData.getAppResourcesDirectoryPath(projectData.projectDir);
const platformsAppResourcesPath = this.getAppResourcesDestinationDirectoryPath(projectData);
this.cleanUpPreparedResources(projectData);
this.$fs.ensureDirectoryExists(platformsAppResourcesPath);
const appResourcesDirStructureHasMigrated = this.$androidResourcesMigrationService.hasMigrated(projectAppResourcesPath);
if (appResourcesDirStructureHasMigrated) {
this.$fs.copyFile(path.join(projectAppResourcesPath, platformData.normalizedPlatformName, constants.SRC_DIR, "*"), platformsAppResourcesPath);
} else {
this.$fs.copyFile(path.join(projectAppResourcesPath, platformData.normalizedPlatformName, "*"), platformsAppResourcesPath);
// https://github.com/NativeScript/android-runtime/issues/899
// App_Resources/Android/libs is reserved to user's aars and jars, but they should not be copied as resources
this.$fs.deleteDirectory(path.join(platformsAppResourcesPath, "libs"));
}
const androidToolsInfo = this.$androidToolsInfo.getToolsInfo({ projectDir: projectData.projectDir });
const compileSdkVersion = androidToolsInfo && androidToolsInfo.compileSdkVersion;
this.cleanResValues(compileSdkVersion, projectData);
}
public async preparePluginNativeCode(pluginData: IPluginData, projectData: IProjectData): Promise<void> {
// build Android plugins which contain AndroidManifest.xml and/or resources
const pluginPlatformsFolderPath = this.getPluginPlatformsFolderPath(pluginData, AndroidProjectService.ANDROID_PLATFORM_NAME);
if (this.$fs.exists(pluginPlatformsFolderPath)) {
const options: IPluginBuildOptions = {
projectDir: projectData.projectDir,
pluginName: pluginData.name,
platformsAndroidDirPath: pluginPlatformsFolderPath,
aarOutputDir: pluginPlatformsFolderPath,
tempPluginDirPath: path.join(projectData.platformsDir, "tempPlugin")
};
if (await this.$androidPluginBuildService.buildAar(options)) {
this.$logger.info(`Built aar for ${options.pluginName}`);
}
this.$androidPluginBuildService.migrateIncludeGradle(options);
}
}
public async processConfigurationFilesFromAppResources(): Promise<void> {
return;
}
public async removePluginNativeCode(pluginData: IPluginData, projectData: IProjectData): Promise<void> {
// not implemented
}
public async beforePrepareAllPlugins(projectData: IProjectData, dependencies?: IDependencyData[]): Promise<void> {
if (dependencies) {
dependencies = this.filterUniqueDependencies(dependencies);
this.provideDependenciesJson(projectData, dependencies);
}
}
public async handleNativeDependenciesChange(projectData: IProjectData, opts: IRelease): Promise<void> {
return;
}
private filterUniqueDependencies(dependencies: IDependencyData[]): IDependencyData[] {
const depsDictionary = dependencies.reduce((dict, dep) => {
const collision = dict[dep.name];
// in case there are multiple dependencies to the same module, the one declared in the package.json takes precedence
if (!collision || collision.depth > dep.depth) {
dict[dep.name] = dep;
}
return dict;
}, <IDictionary<IDependencyData>>{});
return _.values(depsDictionary);
}
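	// Writes the native Android dependencies to dependencies.json inside platforms/android,
	// storing each dependency's directory relative to the platform folder.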
private provideDependenciesJson(projectData: IProjectData, dependencies: IDependencyData[]): void {
const platformDir = path.join(projectData.platformsDir, AndroidProjectService.ANDROID_PLATFORM_NAME);
const dependenciesJsonPath = path.join(platformDir, constants.DEPENDENCIES_JSON_NAME);
const nativeDependencies = dependencies
.filter(AndroidProjectService.isNativeAndroidDependency)
.map(({ name, directory }) => ({ name, directory: path.relative(platformDir, directory) }));
const jsonContent = JSON.stringify(nativeDependencies, null, 4);
this.$fs.writeFile(dependenciesJsonPath, jsonContent);
}
private static isNativeAndroidDependency({ nativescript }: IDependencyData): boolean {
return nativescript && (nativescript.android || (nativescript.platforms && nativescript.platforms.android));
}
public async stopServices(projectRoot: string): Promise<ISpawnResult> {
const result = await this.$gradleCommandService.executeCommand(["--stop", "--quiet"], {
cwd: projectRoot,
message: "Gradle stop services...",
stdio: "pipe"
});
return result;
}
public async cleanProject(projectRoot: string): Promise<void> {
await this.$gradleBuildService.cleanProject(projectRoot, <any>{ release: false });
}
public async cleanDeviceTempFolder(deviceIdentifier: string, projectData: IProjectData): Promise<void> {
const adb = this.$injector.resolve(DeviceAndroidDebugBridge, { identifier: deviceIdentifier });
const deviceRootPath = `${LiveSyncPaths.ANDROID_TMP_DIR_NAME}/${projectData.projectIdentifiers.android}`;
await adb.executeShellCommand(["rm", "-rf", deviceRootPath]);
}
public async checkForChanges(): Promise<void> {
// Nothing android specific to check yet.
}
public getDeploymentTarget(projectData: IProjectData): semver.SemVer { return; }
private copy(projectRoot: string, frameworkDir: string, files: string, cpArg: string): void {
const paths = files.split(' ').map(p => path.join(frameworkDir, p));
shell.cp(cpArg, paths, projectRoot);
}
private validatePackageName(packageName: string): void {
//Make the package conform to Java package types
//Enforce underscore limitation
if (!/^[a-zA-Z]+(\.[a-zA-Z0-9][a-zA-Z0-9_]*)+$/.test(packageName)) {
this.$errors.fail("Package name must look like: com.company.Name");
}
//Class is a reserved word
if (/\b[Cc]lass\b/.test(packageName)) {
this.$errors.fail("class is a reserved word");
}
}
private validateProjectName(projectName: string): void {
if (projectName === '') {
this.$errors.fail("Project name cannot be empty");
}
//Classes in Java don't begin with numbers
if (/^[0-9]/.test(projectName)) {
this.$errors.fail("Project name must not begin with a number");
}
}
private getLegacyAppResourcesDestinationDirPath(projectData: IProjectData): string {
const resourcePath: string[] = [constants.APP_FOLDER_NAME, constants.SRC_DIR, constants.MAIN_DIR, constants.RESOURCES_DIR];
return path.join(this.getPlatformData(projectData).projectRoot, ...resourcePath);
}
private getUpdatedAppResourcesDestinationDirPath(projectData: IProjectData): string {
const resourcePath: string[] = [constants.APP_FOLDER_NAME, constants.SRC_DIR];
return path.join(this.getPlatformData(projectData).projectRoot, ...resourcePath);
}
/**
* The purpose of this method is to delete the previously prepared user resources.
* The content of the `<platforms>/android/.../res` directory is based on user's resources and gradle project template from android-runtime.
	 * During preparation of the `<path to user's App_Resources>/Android` we want to clean all the user's files from the previous preparation,
* but keep the ones that were introduced during `platform add` of the android-runtime.
* Currently the Gradle project template contains resources only in values and values-v21 directories.
	 * So the current logic of the method is cleaning all resources from `<platforms>/android/.../res` that are not in `values.*` directories
* and that exist in the `<path to user's App_Resources>/Android/.../res` directory
	 * This means that if the user has a resource file in values-v29, for example, builds the project and then deletes this resource,
	 * it will be kept in the platforms directory. Reference issue: `https://github.com/NativeScript/nativescript-cli/issues/5083`
	 * The same is valid for files in `drawable-<resolution>` directories - if the user's resources contain a drawable-hdpi directory
	 * that is deleted after the first build of the app, it will remain in the platforms directory.
*/
private cleanUpPreparedResources(projectData: IProjectData): void {
let resourcesDirPath = path.join(projectData.appResourcesDirectoryPath, this.getPlatformData(projectData).normalizedPlatformName);
if (this.$androidResourcesMigrationService.hasMigrated(projectData.appResourcesDirectoryPath)) {
resourcesDirPath = path.join(resourcesDirPath, constants.SRC_DIR, constants.MAIN_DIR, constants.RESOURCES_DIR);
}
const valuesDirRegExp = /^values/;
if (this.$fs.exists(resourcesDirPath)) {
const resourcesDirs = this.$fs.readDirectory(resourcesDirPath).filter(resDir => !resDir.match(valuesDirRegExp));
const resDestinationDir = this.getResDestinationDir(projectData);
_.each(resourcesDirs, currentResource => {
this.$fs.deleteDirectory(path.join(resDestinationDir, currentResource));
});
}
}
private async trackKotlinUsage(projectRoot: string): Promise<void> {
const buildStatistics = this.tryGetAndroidBuildStatistics(projectRoot);
try {
if (buildStatistics && buildStatistics.kotlinUsage) {
const analyticsDelimiter = constants.AnalyticsEventLabelDelimiter;
const hasUseKotlinPropertyInAppData = `hasUseKotlinPropertyInApp${analyticsDelimiter}${buildStatistics.kotlinUsage.hasUseKotlinPropertyInApp}`;
const hasKotlinRuntimeClassesData = `hasKotlinRuntimeClasses${analyticsDelimiter}${buildStatistics.kotlinUsage.hasKotlinRuntimeClasses}`;
await this.$analyticsService.trackEventActionInGoogleAnalytics({
action: constants.TrackActionNames.UsingKotlin,
additionalData: `${hasUseKotlinPropertyInAppData}${analyticsDelimiter}${hasKotlinRuntimeClassesData}`
});
}
} catch (e) {
this.$logger.trace(`Failed to track android build statistics. Error is: ${e.message}`);
}
}
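	// Reads the analytics data file produced by the Android build, if it exists;
	// returns undefined when the file is missing or cannot be parsed.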
private tryGetAndroidBuildStatistics(projectRoot: string): Object {
const staticsFilePath = path.join(projectRoot, constants.ANDROID_ANALYTICS_DATA_DIR, constants.ANDROID_ANALYTICS_DATA_FILE);
let buildStatistics;
if (this.$fs.exists(staticsFilePath)) {
try {
buildStatistics = this.$fs.readJson(staticsFilePath);
} catch (e) {
this.$logger.trace(`Unable to read android build statistics file. Error is ${e.message}`);
}
}
return buildStatistics;
}
}
$injector.register("androidProjectService", AndroidProjectService);
| AndroidProjectService |
service_maintenance.go | package action
import (
"flag"
"fmt"
"github.com/hashicorp/go-multierror"
)
type serviceMaintenance struct {
enabled bool
reason string
*config
}
func ServiceMaintenanceAction() Action |
func (s *serviceMaintenance) CommandFlags() *flag.FlagSet {
f := s.newFlagSet(FLAG_NONE)
f.BoolVar(&s.enabled, "enabled", true, "Boolean value for maintenance mode")
f.StringVar(&s.reason, "reason", "", "Reason for entering maintenance mode")
return f
}
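// Run enables or disables maintenance mode for every service ID passed on the
// command line, collecting per-service errors into a single multierror.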
func (s *serviceMaintenance) Run(args []string) error {
if len(args) <= 0 {
return fmt.Errorf("No service IDs specified")
}
agent, err := s.newAgent()
if err != nil {
return err
}
var result error
for _, id := range args {
if s.enabled {
if err := agent.EnableServiceMaintenance(id, s.reason); err != nil {
result = multierror.Append(result, err)
}
} else {
if err := agent.DisableServiceMaintenance(id); err != nil {
result = multierror.Append(result, err)
}
}
}
return result
}
| {
return &serviceMaintenance{
config: &gConfig,
}
} |
field.go | package ezform
import (
"errors"
"github.com/nathan-osman/go-ezform/fields"
"github.com/nathan-osman/go-reflectr"
)
var (
errInvalidField = errors.New("field must be a pointer to struct")
errInvalidValue = errors.New("field contains an invalid value")
)
// field parses the value provided for the field and validates it.
func | (f interface{}, fieldValue string) error {
field := reflectr.Struct(f)
if !field.IsPtr() {
return errInvalidField
}
i, err := field.Field("Field").Type(fields.Field{}).Addr()
if err != nil {
return err
}
fieldField := i.(*fields.Field)
e, err := parse(field, fieldValue)
if err != nil {
return err
}
if e != nil {
fieldField.Error = e
return errInvalidValue
}
v, err := value(field)
if err != nil {
return err
}
for _, validator := range fieldField.Validators {
e, err := run(validator, v)
if err != nil {
return err
}
if e != nil {
fieldField.Error = e
return errInvalidValue
}
}
return nil
}
| field |
stateless.go | package wf
import (
"github.com/lyraproj/puppet-evaluator/eval"
"github.com/lyraproj/servicesdk/wfapi"
)
type stateless struct { | activity
function interface{}
}
func NewStateless(name string, when wfapi.Condition, input, output []eval.Parameter, function interface{}) wfapi.Stateless {
return &stateless{activity{name, when, input, output}, function}
}
func (s *stateless) Label() string {
return `stateless ` + s.name
}
func (s *stateless) Function() interface{} {
return s.function
} | |
number_test.go | /*
* Licensed to the Apache Software Foundation (ASF) under one or more | * this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package missing_number
import "testing"
func TestMissingNumber(t *testing.T) {
var temp = []int{0}
MissingNumber(temp)
} | * contributor license agreements. See the NOTICE file distributed with |
grpc.go | package server
import (
"github.com/go-kratos/kratos/v2/log" | "github.com/lalifeier/vvgo-mall/app/ums/service/internal/service"
)
// NewGRPCServer creates a new gRPC server.
func NewGRPCServer(c *conf.Server, umsService *service.UmsService, logger log.Logger) *grpc.Server {
var opts = []grpc.ServerOption{
grpc.Middleware(
recovery.Recovery(),
),
}
if c.Grpc.Network != "" {
opts = append(opts, grpc.Network(c.Grpc.Network))
}
if c.Grpc.Addr != "" {
opts = append(opts, grpc.Address(c.Grpc.Addr))
}
if c.Grpc.Timeout != nil {
opts = append(opts, grpc.Timeout(c.Grpc.Timeout.AsDuration()))
}
srv := grpc.NewServer(opts...)
v1.RegisterUmsServer(srv, umsService)
return srv
} | "github.com/go-kratos/kratos/v2/middleware/recovery"
"github.com/go-kratos/kratos/v2/transport/grpc"
v1 "github.com/lalifeier/vvgo-mall/api/ums/service/v1"
"github.com/lalifeier/vvgo-mall/app/ums/service/internal/conf" |
alchemyMapping_TopicModeling.py | __author__ = 'renhao.cui'
import utilities
from sklearn import cross_validation
import combinedMapping as cm
import modelUtility
def alchemyTrainInfer(alchemy_train, alchemy_test, label_train, label_test, trainProbFlag):
# model from A to B: model[A] = {B: score}
(model, cand, candProb) = cm.mappingTrainer4(alchemy_train, label_train)
predictions = utilities.outputMappingResult3_fullList(model, cand, candProb, alchemy_test)
predictionsTrain = {}
if trainProbFlag:
predictionsTrain = utilities.outputMappingResult3_fullList(model, cand, candProb, alchemy_train)
correct = 0.0
total = 0.0
for index, label in enumerate(label_test):
pred = predictions[index][1].keys()[0]
if pred == label:
correct += 1.0
total += 1.0
return correct/total, predictions, predictionsTrain
def run():
| brandList = ['Elmers', 'Chilis', 'BathAndBodyWorks', 'Dominos', 'Triclosan']
outputFile = open('results/alchemy.result', 'w')
for brand in brandList:
		print(brand)
topics, alchemyOutput = modelUtility.readData2('HybridData/Original/' + brand + '.keyword', 'HybridData/Original/' + brand + '.alchemy')
accuracySum = 0.0
for i in range(5):
alchemy_train, alchemy_test, label_train, label_test = cross_validation.train_test_split(alchemyOutput, topics, test_size=0.2, random_state=0)
accuracy, testOutput, trainOutput = alchemyTrainInfer(alchemy_train, alchemy_test, label_train, label_test, True)
accuracySum += accuracy
		print(accuracySum / 5)
outputFile.write(brand+'\t'+str(accuracySum/5)+'\n')
outputFile.close() |
|
certs_test.go | /*
Copyright 2018 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package phases
import (
"os"
"testing"
"github.com/spf13/cobra"
kubeadmapi "k8s.io/kubernetes/cmd/kubeadm/app/apis/kubeadm"
"k8s.io/kubernetes/cmd/kubeadm/app/cmd/phases/workflow"
"k8s.io/kubernetes/cmd/kubeadm/app/phases/certs"
certstestutil "k8s.io/kubernetes/cmd/kubeadm/app/util/certs"
"k8s.io/kubernetes/cmd/kubeadm/app/util/pkiutil"
testutil "k8s.io/kubernetes/cmd/kubeadm/test"
)
type testCertsData struct {
cfg *kubeadmapi.InitConfiguration
}
func (t *testCertsData) Cfg() *kubeadmapi.InitConfiguration { return t.cfg }
func (t *testCertsData) ExternalCA() bool { return false }
func (t *testCertsData) CertificateDir() string { return t.cfg.CertificatesDir }
func (t *testCertsData) CertificateWriteDir() string { return t.cfg.CertificatesDir }
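// TestCertsWithCSRs runs the API server certificate sub-phase in CSR-only mode
// and verifies that a CSR and key are written to disk.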
func TestCertsWithCSRs(t *testing.T) |
func TestCreateSparseCerts(t *testing.T) {
for _, test := range certstestutil.GetSparseCertTestCases(t) {
t.Run(test.Name, func(t *testing.T) {
tmpdir := testutil.SetupTempDir(t)
defer os.RemoveAll(tmpdir)
certstestutil.WritePKIFiles(t, tmpdir, test.Files)
r := workflow.NewRunner()
r.AppendPhase(NewCertsPhase())
r.SetDataInitializer(func(*cobra.Command) (workflow.RunData, error) {
certsData := &testCertsData{
cfg: testutil.GetDefaultInternalConfig(t),
}
certsData.cfg.CertificatesDir = tmpdir
return certsData, nil
})
if err := r.Run(); (err != nil) != test.ExpectError {
t.Fatalf("expected error to be %t, got %t (%v)", test.ExpectError, (err != nil), err)
}
})
}
}
| {
csrDir := testutil.SetupTempDir(t)
defer os.RemoveAll(csrDir)
certDir := testutil.SetupTempDir(t)
defer os.RemoveAll(certDir)
cert := &certs.KubeadmCertAPIServer
certsData := &testCertsData{
cfg: testutil.GetDefaultInternalConfig(t),
}
certsData.cfg.CertificatesDir = certDir
// global vars
csrOnly = true
csrDir = certDir
defer func() {
csrOnly = false
}()
phase := NewCertsPhase()
// find the api cert phase
var apiServerPhase *workflow.Phase
for _, phase := range phase.Phases {
if phase.Name == cert.Name {
apiServerPhase = &phase
break
}
}
if apiServerPhase == nil {
t.Fatalf("couldn't find apiserver phase")
}
err := apiServerPhase.Run(certsData)
if err != nil {
t.Fatalf("couldn't run API server phase: %v", err)
}
if _, _, err := pkiutil.TryLoadCSRAndKeyFromDisk(csrDir, cert.BaseName); err != nil {
t.Fatalf("couldn't load certificate %q: %v", cert.BaseName, err)
}
} |
core.rs | /* ------------------------------------------------------------
PrettyGrammar
Project.Github: "https://github.com/kerryeon/pretty-grammar"
---------------------------------------------------------------
Author:
Name: "kerryeon"
Email: "[email protected]"
Github: "https://github.com/kerryeon"
Generated:
Date: "3/6/2019"
------------------------------------------------------------ */
/// Correct the string to match the grammar.
///
/// # Examples
///
/// ```
/// use pretty_grammar::*;
///
/// let result = translate("kr", "철수<은> 영희<를> 좋아합니다.".to_owned());
/// assert_eq!("철수는 영희를 좋아합니다.", result);
/// ```
///
/// # Panics
/// You should not use unsupported `lang`s.
///
pub fn translate(lang: &'static str, msg: String) -> String {
for (target, func) in super::FILTER.it | grammar.
///
/// # Examples
///
/// ```
/// use pretty_grammar::*;
///
/// let name = "철수";
/// let obj = "영희";
/// let format = "{name}<은> {obj}<를> 좋아합니다.";
/// let result = translate!(format with
/// lang: "kr",
/// name: name,
/// obj: obj,
/// );
/// assert_eq!("철수는 영희를 좋아합니다.", result);
/// ```
///
/// # Panics
/// You should not use unsupported `lang`s.
///
#[macro_export]
macro_rules! translate {
($msg: tt with lang: $lang: tt, $($tag: ident: $arg: tt),* ) => ({
$crate::translate(
$lang, format_dyn!($msg with $($tag: $arg),*)
)
});
// For unnecessary comma
($msg: tt with lang: $lang: tt, $($tag: ident: $arg: tt),*, ) => ({
translate!($msg with lang: $lang, $($tag: $arg),*)
});
}
/// Creates a `String` using dynamic `format` and interpolation of runtime expressions.
///
/// # Examples
///
/// ```
/// use pretty_grammar::*;
///
/// let name = "철수";
/// let obj = "영희";
/// let format = "{name}는 {obj}를 좋아합니다.";
/// let result = format_dyn!(format with
/// name: name,
/// obj: obj,
/// );
/// assert_eq!("철수는 영희를 좋아합니다.", result);
/// ```
///
#[macro_export]
macro_rules! format_dyn {
// [Example]
// {msg} with name: {name}, age: {age}
($msg: tt with $($tag: ident: $arg: tt),* ) => ({
let mut msg: String = $msg.to_owned();
$(
msg = msg.replace(format!("{{{}}}", stringify!($tag)).as_str(), $arg);
)*
msg
});
// For unnecessary comma
($msg: tt with $($tag: ident: $arg: tt),*, ) => ({
format_dyn!($msg with $($tag: $arg),*)
});
}
| er() {
if *target == lang {
return func(msg)
}
}
// Not Found
panic!()
}
/// Correct `dynamic` string to match the |
files.rs | use roxmltree::Node;
use std::convert::TryFrom;
use std::convert::TryInto;
use std::path::PathBuf;
#[derive(Debug)]
pub struct Files {
files: Vec<File>,
}
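// Parses a <Files> element: every child must be a <File> element and each child
// must itself parse successfully, otherwise the first error is returned.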
impl<'a, 'd: 'a> TryFrom<Node<'a, 'd>> for Files {
type Error = String;
fn try_from(node: Node) -> Result<Self, Self::Error> |
}
#[derive(Debug)]
pub struct File {
path: PathBuf,
file_type: Filetype,
uid: u32,
gid: u32,
mode: u32,
hash: String,
}
impl<'a, 'd: 'a> TryFrom<Node<'a, 'd>> for File {
type Error = String;
fn try_from(node: Node) -> Result<Self, Self::Error> {
let path: PathBuf = node
.children()
.filter(Node::is_element)
.find(|c| c.has_tag_name("Path"))
.and_then(|n| n.text())
.ok_or_else(|| "Path node not found".to_owned())?
.into();
let file_type: Filetype = node
.children()
.filter(Node::is_element)
.find(|c| c.has_tag_name("Type"))
.ok_or_else(|| "Type node not found".to_owned())
.and_then(TryInto::try_into)?;
let uid: u32 = node
.children()
.filter(Node::is_element)
.find(|c| c.has_tag_name("Uid"))
.and_then(|n| n.text())
.ok_or_else(|| "Uid node not found".to_owned())?
.parse()
.map_err(|err| format!("{:?}", err))?;
let gid: u32 = node
.children()
.filter(Node::is_element)
.find(|c| c.has_tag_name("Gid"))
.and_then(|n| n.text())
.ok_or_else(|| "Gid node not found".to_owned())?
.parse()
.map_err(|err| format!("{:?}", err))?;
let mode: u32 = node
.children()
.filter(Node::is_element)
.find(|c| c.has_tag_name("Mode"))
.and_then(|n| n.text())
.ok_or_else(|| "Mode node not found".to_owned())
.and_then(|s| u32::from_str_radix(s, 8).map_err(|err| format!("{:?}", err)))?;
let hash = node
.children()
.filter(Node::is_element)
.find(|c| c.has_tag_name("Hash"))
.and_then(|n| n.text())
.ok_or_else(|| "Hash node not found".to_owned())?
.to_owned();
Ok(Self {
path,
file_type,
uid,
gid,
mode,
hash,
})
}
}
#[derive(Debug)]
pub enum Filetype {
Executable,
Library,
Data,
Man,
Doc,
}
impl<'a, 'd: 'a> TryFrom<Node<'a, 'd>> for Filetype {
type Error = String;
fn try_from(node: Node) -> Result<Self, Self::Error> {
        let file_type_str = node.text().ok_or_else(|| "Type node not found".to_owned())?;
Ok(match file_type_str {
"executable" => Filetype::Executable,
"library" => Filetype::Library,
"data" => Filetype::Data,
"man" => Filetype::Man,
"doc" => Filetype::Doc,
err => return Err(format!("Unknown file type {}", err)),
})
}
}
| {
if node.has_tag_name("Files")
&& node
.children()
.filter(Node::is_element)
.all(|c| c.has_tag_name("File"))
{
let files: Vec<Result<File, String>> = node
.children()
.filter(Node::is_element)
.map(TryInto::try_into)
.collect();
if !files.is_empty() {
if let Some(err) = files.iter().find_map(|n| match n {
Err(err) => Some(err),
Ok(_) => None,
}) {
return Err(err.clone());
}
let files = files.into_iter().map(Result::unwrap).collect();
Ok(Self { files })
} else {
Err("File node not found".to_owned())
}
} else {
Err("Files node not found".to_owned())
}
} |
PlayerList.js | import React, { Component } from 'react'
import { connect } from 'react-redux'
import { NavLink } from 'react-router-dom'
import { Card, CardBody, CardHeader, CardTitle, Col, Row, Table } from 'reactstrap'
import { getPlayers } from '../../redux/actions/playersActions'
import PlayerAddForm from './PlayerAddForm'
class | extends Component {
componentDidMount() {
this.props.getPlayers()
}
addPlayerToList(player) {
this.props.getPlayers()
}
render() {
const { players } = this.props.players
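		// Inline style for the colored circle shown next to each player's name.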
		const colorCircleStyle = (color) => {
return ({
width: '2rem',
height: '2rem',
borderRadius: '50%',
marginRight: '0.5rem',
background: color
});
};
return (
<>
<div className="content">
<Row>
<Col md="3">
<Card>
<CardHeader>
<CardTitle tag="h4">Speler toevoegen</CardTitle>
</CardHeader>
<CardBody>
<PlayerAddForm onSubmit={(player) => this.addPlayerToList(player)}/>
</CardBody>
</Card>
</Col>
<Col md="9">
<Card>
<CardHeader>
<CardTitle tag="h4">Spelers</CardTitle>
</CardHeader>
<CardBody>
<Table responsive className='playerlist__table'>
<thead className="text-primary">
<tr>
<th>Naam</th>
<th>Aantal games</th>
<th>Wins</th>
<th className="text-right">Win percentage</th>
</tr>
</thead>
<tbody>
{players.map((item) => {
return (
<tr key={item.id} >
											<td><NavLink style={{display: 'flex', alignItems: 'center'}} to={'/admin/player/' + item.id}><div className='color-circle' style={colorCircleStyle(item.color)}></div> {item.Name}</NavLink></td>
<td>{item.games.length}</td>
<td>{item.wins.length}</td>
<td className="text-right">{item.games.length > 0 ? ((item.wins.length / item.games.length) * 100).toFixed(0) + '%' : '-'}</td>
</tr>
);
})}
</tbody>
</Table>
</CardBody>
</Card>
</Col>
</Row>
</div>
</>
)
}
}
const mapStateToProps = (state) => ({ players: state.players })
export default connect(mapStateToProps, { getPlayers })(PlayerList) | PlayerList |
app.py | import os
from flask import Flask, render_template, request, redirect, url_for
app = Flask(__name__)
@app.route('/', methods=['GET'])
def index(): | app.run(host='0.0.0.0', port=port, debug=True) | return render_template('index.html')
if __name__ == '__main__':
port = int(os.environ.get('PORT', 5000)) |
call_transaction_authority_check.ts | import {BasicRuleConfig} from "./_basic_rule_config";
import {ABAPRule} from "./_abap_rule";
import {ABAPFile} from "../files";
import {Issue, Statements} from "..";
import {IRuleMetadata, RuleTag} from "./_irule";
export class CallTransactionAuthorityCheckConf extends BasicRuleConfig {
}
export class CallTransactionAuthorityCheck extends ABAPRule {
private conf = new CallTransactionAuthorityCheckConf();
public getMetadata(): IRuleMetadata {
return {
key: "call_transaction_authority_check",
title: "Call Transaction Authority-Check", | tags: [RuleTag.Styleguide],
badExample: `CALL TRANSACTION 'FOO'.`,
goodExample: `CALL TRANSACTION 'FOO' WITH AUTHORITY-CHECK.`,
};
}
private getMessage(): string {
return "Add an authority check to CALL TRANSACTION";
}
public getConfig() {
return this.conf;
}
public setConfig(conf: CallTransactionAuthorityCheckConf) {
this.conf = conf;
}
public runParsed(file: ABAPFile) {
const issues: Issue[] = [];
for (const statNode of file.getStatements()) {
const statement = statNode.get();
if (statement instanceof Statements.CallTransaction && !statNode.concatTokensWithoutStringsAndComments().toUpperCase().includes("WITH AUTHORITY-CHECK")) {
issues.push(Issue.atStatement(file, statNode, this.getMessage(), this.getMetadata().key));
}
}
return issues;
}
} | shortDescription: `Checks that usages of CALL TRANSACTION contain an authority-check.`,
extendedInformation: `https://docs.abapopenchecks.org/checks/54/`, |
BackendConnector.js | var _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; };
var _slicedToArray = function () { function sliceIterator(arr, i) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"]) _i["return"](); } finally { if (_d) throw _e; } } return _arr; } return function (arr, i) { if (Array.isArray(arr)) { return arr; } else if (Symbol.iterator in Object(arr)) { return sliceIterator(arr, i); } else { throw new TypeError("Invalid attempt to destructure non-iterable instance"); } }; }();
function _defaults(obj, defaults) { var keys = Object.getOwnPropertyNames(defaults); for (var i = 0; i < keys.length; i++) { var key = keys[i]; var value = Object.getOwnPropertyDescriptor(defaults, key); if (value && value.configurable && obj[key] === undefined) { Object.defineProperty(obj, key, value); } } return obj; }
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; }
function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : _defaults(subClass, superClass); }
import * as utils from './utils';
import baseLogger from './logger';
import EventEmitter from './EventEmitter';
function | (arr, what) {
var found = arr.indexOf(what);
while (found !== -1) {
arr.splice(found, 1);
found = arr.indexOf(what);
}
}
var Connector = function (_EventEmitter) {
_inherits(Connector, _EventEmitter);
function Connector(backend, store, services) {
var options = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : {};
_classCallCheck(this, Connector);
var _this = _possibleConstructorReturn(this, _EventEmitter.call(this));
_this.backend = backend;
_this.store = store;
_this.services = services;
_this.options = options;
_this.logger = baseLogger.create('backendConnector');
_this.state = {};
_this.queue = [];
_this.backend && _this.backend.init && _this.backend.init(services, options.backend, options);
return _this;
}
Connector.prototype.queueLoad = function queueLoad(languages, namespaces, callback) {
var _this2 = this;
// find what needs to be loaded
var toLoad = [],
pending = [],
toLoadLanguages = [],
toLoadNamespaces = [];
languages.forEach(function (lng) {
var hasAllNamespaces = true;
namespaces.forEach(function (ns) {
var name = lng + '|' + ns;
if (_this2.store.hasResourceBundle(lng, ns)) {
_this2.state[name] = 2; // loaded
} else if (_this2.state[name] < 0) {
// nothing to do for err
} else if (_this2.state[name] === 1) {
if (pending.indexOf(name) < 0) pending.push(name);
} else {
_this2.state[name] = 1; // pending
hasAllNamespaces = false;
if (pending.indexOf(name) < 0) pending.push(name);
if (toLoad.indexOf(name) < 0) toLoad.push(name);
if (toLoadNamespaces.indexOf(ns) < 0) toLoadNamespaces.push(ns);
}
});
if (!hasAllNamespaces) toLoadLanguages.push(lng);
});
if (toLoad.length || pending.length) {
this.queue.push({
pending: pending,
loaded: {},
errors: [],
callback: callback
});
}
return {
toLoad: toLoad,
pending: pending,
toLoadLanguages: toLoadLanguages,
toLoadNamespaces: toLoadNamespaces
};
};
Connector.prototype.loaded = function loaded(name, err, data) {
var _this3 = this;
var _name$split = name.split('|'),
_name$split2 = _slicedToArray(_name$split, 2),
lng = _name$split2[0],
ns = _name$split2[1];
if (err) this.emit('failedLoading', lng, ns, err);
if (data) {
this.store.addResourceBundle(lng, ns, data);
}
// set loaded
this.state[name] = err ? -1 : 2;
// callback if ready
this.queue.forEach(function (q) {
utils.pushPath(q.loaded, [lng], ns);
remove(q.pending, name);
if (err) q.errors.push(err);
if (q.pending.length === 0 && !q.done) {
q.errors.length ? q.callback(q.errors) : q.callback();
_this3.emit('loaded', q.loaded);
q.done = true;
}
});
// remove done load requests
this.queue = this.queue.filter(function (q) {
return !q.done;
});
};
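  // read wraps the backend call and retries with exponentially growing delays
  // (up to 5 attempts) when the backend signals a retryable error.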
Connector.prototype.read = function read(lng, ns, fcName, tried, wait, callback) {
var _this4 = this;
if (!tried) tried = 0;
if (!wait) wait = 250;
    if (!lng.length) return callback(null, {}); // nothing to load
this.backend[fcName](lng, ns, function (err, data) {
if (err && data /* = retryFlag */ && tried < 5) {
setTimeout(function () {
_this4.read.call(_this4, lng, ns, fcName, ++tried, wait * 2, callback);
}, wait);
return;
}
callback(err, data);
});
};
Connector.prototype.load = function load(languages, namespaces, callback) {
var _this5 = this;
if (!this.backend) {
this.logger.warn('No backend was added via i18next.use. Will not load resources.');
return callback && callback();
}
var options = _extends({}, this.backend.options, this.options.backend);
if (typeof languages === 'string') languages = this.services.languageUtils.toResolveHierarchy(languages);
if (typeof namespaces === 'string') namespaces = [namespaces];
var toLoad = this.queueLoad(languages, namespaces, callback);
if (!toLoad.toLoad.length) {
if (!toLoad.pending.length) callback(); // nothing to load and no pendings...callback now
return; // pendings will trigger callback
}
// load with multi-load
if (options.allowMultiLoading && this.backend.readMulti) {
this.read(toLoad.toLoadLanguages, toLoad.toLoadNamespaces, 'readMulti', null, null, function (err, data) {
if (err) _this5.logger.warn('loading namespaces ' + toLoad.toLoadNamespaces.join(', ') + ' for languages ' + toLoad.toLoadLanguages.join(', ') + ' via multiloading failed', err);
if (!err && data) _this5.logger.log('loaded namespaces ' + toLoad.toLoadNamespaces.join(', ') + ' for languages ' + toLoad.toLoadLanguages.join(', ') + ' via multiloading', data);
toLoad.toLoad.forEach(function (name) {
var _name$split3 = name.split('|'),
_name$split4 = _slicedToArray(_name$split3, 2),
l = _name$split4[0],
n = _name$split4[1];
var bundle = utils.getPath(data, [l, n]);
if (bundle) {
_this5.loaded(name, err, bundle);
} else {
var _err = 'loading namespace ' + n + ' for language ' + l + ' via multiloading failed';
_this5.loaded(name, _err);
_this5.logger.error(_err);
}
});
});
}
// load one by one
else {
(function () {
var readOne = function readOne(name) {
var _this6 = this;
var _name$split5 = name.split('|'),
_name$split6 = _slicedToArray(_name$split5, 2),
lng = _name$split6[0],
ns = _name$split6[1];
this.read(lng, ns, 'read', null, null, function (err, data) {
if (err) _this6.logger.warn('loading namespace ' + ns + ' for language ' + lng + ' failed', err);
if (!err && data) _this6.logger.log('loaded namespace ' + ns + ' for language ' + lng, data);
_this6.loaded(name, err, data);
});
};
;
toLoad.toLoad.forEach(function (name) {
readOne.call(_this5, name);
});
})();
}
};
Connector.prototype.reload = function reload(languages, namespaces) {
var _this7 = this;
if (!this.backend) {
this.logger.warn('No backend was added via i18next.use. Will not load resources.');
}
var options = _extends({}, this.backend.options, this.options.backend);
if (typeof languages === 'string') languages = this.services.languageUtils.toResolveHierarchy(languages);
if (typeof namespaces === 'string') namespaces = [namespaces];
// load with multi-load
if (options.allowMultiLoading && this.backend.readMulti) {
this.read(languages, namespaces, 'readMulti', null, null, function (err, data) {
if (err) _this7.logger.warn('reloading namespaces ' + namespaces.join(', ') + ' for languages ' + languages.join(', ') + ' via multiloading failed', err);
if (!err && data) _this7.logger.log('reloaded namespaces ' + namespaces.join(', ') + ' for languages ' + languages.join(', ') + ' via multiloading', data);
languages.forEach(function (l) {
namespaces.forEach(function (n) {
var bundle = utils.getPath(data, [l, n]);
if (bundle) {
_this7.loaded(l + '|' + n, err, bundle);
} else {
var _err2 = 'reloading namespace ' + n + ' for language ' + l + ' via multiloading failed';
_this7.loaded(l + '|' + n, _err2);
_this7.logger.error(_err2);
}
});
});
});
}
// load one by one
else {
(function () {
var readOne = function readOne(name) {
var _this8 = this;
var _name$split7 = name.split('|'),
_name$split8 = _slicedToArray(_name$split7, 2),
lng = _name$split8[0],
ns = _name$split8[1];
this.read(lng, ns, 'read', null, null, function (err, data) {
if (err) _this8.logger.warn('reloading namespace ' + ns + ' for language ' + lng + ' failed', err);
if (!err && data) _this8.logger.log('reloaded namespace ' + ns + ' for language ' + lng, data);
_this8.loaded(name, err, data);
});
};
;
languages.forEach(function (l) {
namespaces.forEach(function (n) {
readOne.call(_this7, l + '|' + n);
});
});
})();
}
};
Connector.prototype.saveMissing = function saveMissing(languages, namespace, key, fallbackValue) {
if (this.backend && this.backend.create) this.backend.create(languages, namespace, key, fallbackValue);
// write to store to avoid resending
if (!languages || !languages[0]) return;
this.store.addResource(languages[0], namespace, key, fallbackValue);
};
return Connector;
}(EventEmitter);
export default Connector; | remove |
s3_test.go | package s3
import (
"io/ioutil"
"os"
"strconv"
"testing"
"github.com/crowdmob/goamz/aws"
"github.com/docker/distribution/storagedriver"
"github.com/docker/distribution/storagedriver/testsuites"
"gopkg.in/check.v1"
)
// Hook up gocheck into the "go test" runner.
func Test(t *testing.T) { check.TestingT(t) }
func init() {
accessKey := os.Getenv("AWS_ACCESS_KEY")
secretKey := os.Getenv("AWS_SECRET_KEY")
bucket := os.Getenv("S3_BUCKET")
encrypt := os.Getenv("S3_ENCRYPT")
secure := os.Getenv("S3_SECURE")
v4auth := os.Getenv("S3_USE_V4_AUTH")
region := os.Getenv("AWS_REGION")
root, err := ioutil.TempDir("", "driver-")
if err != nil {
panic(err)
}
defer os.Remove(root)
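	// s3DriverConstructor builds an S3 storage driver for the given region using
	// the credentials and options read from the environment above.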
s3DriverConstructor := func(region aws.Region) (storagedriver.StorageDriver, error) {
encryptBool := false
if encrypt != "" {
encryptBool, err = strconv.ParseBool(encrypt)
if err != nil {
return nil, err
}
}
secureBool := true
if secure != "" {
secureBool, err = strconv.ParseBool(secure)
if err != nil {
return nil, err
}
}
v4AuthBool := true
if v4auth != "" {
v4AuthBool, err = strconv.ParseBool(v4auth)
if err != nil {
return nil, err
}
}
parameters := DriverParameters{
accessKey,
secretKey,
bucket,
region,
encryptBool,
secureBool,
v4AuthBool,
root,
}
return New(parameters)
}
// Skip S3 storage driver tests if environment variable parameters are not provided
skipCheck := func() string {
if accessKey == "" || secretKey == "" || bucket == "" || encrypt == "" |
return ""
}
// for _, region := range aws.Regions {
// if region == aws.USGovWest {
// continue
// }
testsuites.RegisterInProcessSuite(func() (storagedriver.StorageDriver, error) {
return s3DriverConstructor(aws.GetRegion(region))
}, skipCheck)
// testsuites.RegisterIPCSuite(driverName, map[string]string{
// "accesskey": accessKey,
// "secretkey": secretKey,
// "region": region.Name,
// "bucket": bucket,
// "encrypt": encrypt,
// }, skipCheck)
// }
}
| {
return "Must set AWS_ACCESS_KEY, AWS_SECRET_KEY, S3_BUCKET, and S3_ENCRYPT to run S3 tests"
} |
lib.rs | #![forbid(unsafe_code)]
mod lib_io;
pub use lib_io::AsyncPeek;
// ref https://github.com/rust-lang/futures-rs/blob/0.3.5/futures-util/src/io/mod.rs#L132-L382
pub trait AsyncPeekExt: AsyncPeek {
fn peek_async<'a>(&'a mut self, buf: &'a mut [u8]) -> Peek<'a, Self>
where
Self: Unpin,
{
Peek::new(self, buf)
}
}
impl<R: AsyncPeek + ?Sized> AsyncPeekExt for R {}
// ref https://github.com/rust-lang/futures-rs/blob/0.3.5/futures-util/src/io/read.rs
mod peek {
use crate::AsyncPeek;
use std::io;
use std::pin::Pin;
use futures_util::future::Future;
use futures_util::task::{Context, Poll};
pub struct Peek<'a, R: ?Sized> {
reader: &'a mut R,
buf: &'a mut [u8],
} |
impl<R: ?Sized + Unpin> Unpin for Peek<'_, R> {}
impl<'a, R: AsyncPeek + ?Sized + Unpin> Peek<'a, R> {
pub(super) fn new(reader: &'a mut R, buf: &'a mut [u8]) -> Self {
Peek { reader, buf }
}
}
impl<R: AsyncPeek + ?Sized + Unpin> Future for Peek<'_, R> {
type Output = io::Result<usize>;
fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
let this = &mut *self;
Pin::new(&mut this.reader).poll_peek(cx, this.buf)
}
}
}
pub use peek::*;
// ref https://github.com/rust-lang/futures-rs/blob/0.3.5/futures-util/src/io/cursor.rs#L163-L185
mod cursor {
use crate::AsyncPeek;
use std::io;
use std::pin::Pin;
use futures_util::io::AsyncBufRead;
use futures_util::io::Cursor;
use futures_util::task::{Context, Poll};
impl<T: AsRef<[u8]> + Unpin> AsyncPeek for Cursor<T> {
fn poll_peek(
self: Pin<&mut Self>,
cx: &mut Context<'_>,
buf: &mut [u8],
) -> Poll<io::Result<usize>> {
self.poll_fill_buf(cx).map(|r| match r {
Ok(mut bytes) => {
let n = bytes.len();
io::copy(&mut bytes, &mut Box::new(buf))?;
Ok(n)
}
Err(e) => Err(e),
})
}
}
} | |
data.py | from ..imports import *
from .. import utils as U
from .preprocessor import ImagePreprocessor
def show_image(img_path):
"""
Given file path to image, show it in Jupyter notebook
"""
if not os.path.isfile(img_path):
raise ValueError('%s is not valid file' % (img_path))
img = plt.imread(img_path)
out = plt.imshow(img)
return out
def show_random_images(img_folder, n=4, rows=1):
"""
    Display random images from the given img_folder.
"""
fnames = []
for ext in ('*.gif', '*.png', '*.jpg'):
fnames.extend(glob.glob(os.path.join(img_folder, ext)))
ims = []
for i in range(n):
img_path = random.choice(fnames)
img = image.load_img(img_path)
x = image.img_to_array(img)
x = x/255.
ims.append(x)
U.plots(ims, rows=rows)
return
def preview_data_aug(img_path, data_aug, rows=1, n=4):
"""
    Preview data augmentation (ImageDataGenerator)
on a supplied image.
"""
if type(img_path) != type('') or not os.path.isfile(img_path):
raise ValueError('img_path must be valid file path to image')
idg = copy.copy(data_aug)
idg.featurewise_center = False
idg.featurewise_std_normalization = False
idg.samplewise_center = False
idg.samplewise_std_normalization = False
idg.rescale = None
idg.zca_whitening = False
idg.preprocessing_function = None
img = image.load_img(img_path)
x = image.img_to_array(img)
x = x/255.
x = x.reshape((1,) + x.shape)
i = 0
ims = []
for batch in idg.flow(x, batch_size=1):
ims.append(np.squeeze(batch))
i += 1
if i >= n: break
U.plots(ims, rows=rows)
return
def | (img_path, data_aug, n=4):
"""
    Preview data augmentation (ImageDataGenerator)
on a supplied image.
"""
if type(img_path) != type('') or not os.path.isfile(img_path):
raise ValueError('img_path must be valid file path to image')
idg = copy.copy(data_aug)
idg.featurewise_center = False
idg.featurewise_std_normalization = False
idg.samplewise_center = False
idg.samplewise_std_normalization = False
idg.rescale = None
idg.zca_whitening = False
idg.preprocessing_function = None
img = image.load_img(img_path)
x = image.img_to_array(img)
x = x/255.
x = x.reshape((1,) + x.shape)
i = 0
for batch in idg.flow(x, batch_size=1):
plt.figure()
plt.imshow(np.squeeze(batch))
i += 1
if i >= n: break
return
def get_data_aug(
rotation_range=40,
zoom_range=0.2,
width_shift_range=0.2,
height_shift_range=0.2,
horizontal_flip=False,
vertical_flip=False,
featurewise_center=True,
featurewise_std_normalization=True,
samplewise_center=False,
samplewise_std_normalization=False,
rescale=None,
**kwargs):
"""
This function is simply a wrapper around ImageDataGenerator
with some reasonable defaults for data augmentation.
Returns the default image_data_generator to support
data augmentation and data normalization.
Parameters can be adjusted by caller.
Note that the ktrain.vision.model.image_classifier
function may adjust these as needed.
"""
data_aug = image.ImageDataGenerator(
rotation_range=rotation_range,
zoom_range=zoom_range,
width_shift_range=width_shift_range,
height_shift_range=height_shift_range,
horizontal_flip=horizontal_flip,
vertical_flip=vertical_flip,
featurewise_center=featurewise_center,
featurewise_std_normalization=featurewise_std_normalization,
samplewise_center=samplewise_center,
samplewise_std_normalization=samplewise_std_normalization,
rescale=rescale,
**kwargs)
return data_aug
def get_test_datagen(data_aug=None):
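    """
    Returns an ImageDataGenerator for validation/test data, copying the
    normalization settings from data_aug when one is supplied.
    """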
if data_aug:
featurewise_center = data_aug.featurewise_center
featurewise_std_normalization = data_aug.featurewise_std_normalization
samplewise_center = data_aug.samplewise_center
samplewise_std_normalization = data_aug.samplewise_std_normalization
rescale = data_aug.rescale
zca_whitening = data_aug.zca_whitening
test_datagen = image.ImageDataGenerator(
rescale=rescale,
featurewise_center=featurewise_center,
samplewise_center=samplewise_center,
featurewise_std_normalization=featurewise_std_normalization,
samplewise_std_normalization=samplewise_std_normalization,
zca_whitening=zca_whitening)
else:
test_datagen = image.ImageDataGenerator()
return test_datagen
def process_datagen(data_aug, train_array=None, train_directory=None,
target_size=None,
color_mode='rgb',
flat_dir=False):
# set generators for train and test
if data_aug is not None:
train_datagen = data_aug
test_datagen = get_test_datagen(data_aug=data_aug)
else:
train_datagen = get_test_datagen()
test_datagen = get_test_datagen()
# compute statistics for normalization
fit_datagens(train_datagen, test_datagen,
train_array=train_array,
train_directory=train_directory,
target_size=target_size,
color_mode=color_mode, flat_dir=flat_dir)
return (train_datagen, test_datagen)
def fit_datagens(train_datagen, test_datagen,
train_array=None, train_directory=None,
target_size=None,
color_mode='rgb', flat_dir=False):
"""
computes stats of images for normalization
"""
if not datagen_needs_fit(train_datagen): return
if bool(train_array is not None) == bool(train_directory):
raise ValueError('only one of train_array or train_directory is required.')
if train_array is not None:
train_datagen.fit(train_array)
test_datagen.fit(train_array)
else:
if target_size is None:
raise ValueError('target_size is required when train_directory is supplied')
fit_samples = sample_image_folder(train_directory, target_size,
color_mode=color_mode, flat_dir=flat_dir)
train_datagen.fit(fit_samples)
test_datagen.fit(fit_samples)
return
def datagen_needs_fit(datagen):
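    """
    Returns True when the generator must first be fit on sample data
    (featurewise statistics or ZCA whitening are enabled).
    """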
if datagen.featurewise_center or datagen.featurewise_std_normalization or \
datagen.zca_whitening:
return True
else:
return False
def sample_image_folder(train_directory,
target_size,
color_mode='rgb', flat_dir=False):
# adjust train_directory
classes = None
if flat_dir and train_directory is not None:
folder = train_directory
if folder[-1] != os.sep: folder += os.sep
parent = os.path.dirname(os.path.dirname(folder))
folder_name = os.path.basename(os.path.dirname(folder))
train_directory = parent
classes = [folder_name]
# sample images
batch_size = 100
img_gen = image.ImageDataGenerator()
batches = img_gen.flow_from_directory(
directory=train_directory,
classes=classes,
target_size=target_size,
batch_size=batch_size,
color_mode=color_mode,
shuffle=True)
the_shape = batches[0][0].shape
sample_size = the_shape[0]
if K.image_data_format() == 'channels_first':
num_channels = the_shape[1]
else:
num_channels = the_shape[-1]
imgs, labels = next(batches)
return imgs
def detect_color_mode(train_directory,
target_size=(32,32)):
try:
fname = glob.glob(os.path.join(train_directory, '**/*'))[0]
img = Image.open(fname).resize(target_size)
num_channels = len(img.getbands())
if num_channels == 3: return 'rgb'
elif num_channels == 1: return 'grayscale'
else: return 'rgby'
except:
warnings.warn('could not detect color_mode from %s' % (train_directory))
return
def preprocess_csv(csv_in, csv_out, x_col='filename', y_col=None,
sep=',', label_sep=' ', suffix='', split_by=None):
"""
Takes a CSV where the one column contains a file name and a column
containing a string representations of the class(es) like here:
image_name,tags
01, sunny|hot
02, cloudy|cold
03, cloudy|hot
.... and one-hot encodes the classes to produce a CSV as follows:
image_name, cloudy, cold, hot, sunny
01.jpg,0,0,1,1
02.jpg,1,1,0,0
03.jpg,1,0,1,0
Args:
csv_in (str): filepath to input CSV file
csv_out (str): filepath to output CSV file
x_col (str): name of column containing file names
y_col (str): name of column containing the classes
        sep (str): field delimiter of entire file (e.g., comma for CSV)
label_sep (str): delimiter for column containing classes
suffix (str): adds suffix to x_col values
split_by(str): name of column. A separate CSV will be
created for each value in column. Useful
for splitting a CSV based on whether a column
contains 'train' or 'valid'.
Return:
        list : the list of classes (and csv_out will be the new CSV file)
"""
if not y_col and not suffix:
raise ValueError('one or both of y_col and suffix should be supplied')
df = pd.read_csv(csv_in, sep=sep)
f_csv_out = open(csv_out, 'w')
writer = csv.writer(f_csv_out, delimiter=sep)
if y_col: df[y_col] = df[y_col].apply(str)
# write header
if y_col:
classes = set()
for row in df.iterrows():
data = row[1]
tags = data[y_col].split(label_sep)
classes.update(tags)
classes = list(classes)
classes.sort()
writer.writerow([x_col] + classes)
else:
classes = df.columns[:-1]
        writer.writerow(df.columns)
# write rows
for row in df.iterrows():
data = row[1]
data[x_col] = data[x_col] + suffix
if y_col:
out = list(data[[x_col]].values)
tags = set(data[y_col].strip().split(label_sep))
for c in classes:
if c in tags: out.append(1)
else: out.append(0)
else:
out = data
writer.writerow(out)
f_csv_out.close()
return classes
def images_from_folder(datadir, target_size=(224,224),
classes=None,
color_mode='rgb',
train_test_names=['train', 'test'],
data_aug=None, verbose=1):
"""
Returns image generator (Iterator instance).
Assumes output will be 2D one-hot-encoded labels for categorization.
Note: This function preprocesses the input in preparation
for a ResNet50 model.
Args:
datadir (string): path to training (or validation/test) dataset
Assumes folder follows this structure:
├── datadir
│ ├── train
│ │ ├── class0 # folder containing documents of class 0
│ │ ├── class1 # folder containing documents of class 1
│ │ ├── class2 # folder containing documents of class 2
│ │ └── classN # folder containing documents of class N
│ └── test
│ ├── class0 # folder containing documents of class 0
│ ├── class1 # folder containing documents of class 1
│ ├── class2 # folder containing documents of class 2
│ └── classN # folder containing documents of class N
target_size (tuple): image dimensions
classes (list): optional list of class subdirectories (e.g., ['cats','dogs'])
color_mode (string): color mode
train_test_names(list): names for train and test subfolders
data_aug(ImageDataGenerator): a keras.preprocessing.image.ImageDataGenerator
for data augmentation
verbose (bool): verbosity
Returns:
batches: a tuple of two Iterators - one for train and one for test
"""
# train/test names
train_str = train_test_names[0]
test_str = train_test_names[1]
train_dir = os.path.join(datadir, train_str)
test_dir = os.path.join(datadir, test_str)
# color mode warning
if PIL_INSTALLED:
inferred_color_mode = detect_color_mode(train_dir)
if inferred_color_mode is not None and (inferred_color_mode != color_mode):
U.vprint('color_mode detected (%s) different than color_mode selected (%s)' % (inferred_color_mode, color_mode),
verbose=verbose)
# get train and test data generators
(train_datagen, test_datagen) = process_datagen(data_aug,
train_directory=train_dir,
target_size=target_size,
color_mode=color_mode)
batches_tr = train_datagen.flow_from_directory(train_dir,
target_size=target_size,
classes=classes,
class_mode='categorical',
shuffle=True,
interpolation='bicubic',
color_mode = color_mode)
batches_te = test_datagen.flow_from_directory(test_dir,
target_size=target_size,
classes=classes,
class_mode='categorical',
shuffle=False,
interpolation='bicubic',
color_mode = color_mode)
# setup preprocessor
class_tup = sorted(batches_tr.class_indices.items(), key=operator.itemgetter(1))
preproc = ImagePreprocessor(test_datagen,
[x[0] for x in class_tup],
target_size=target_size,
color_mode=color_mode)
return (batches_tr, batches_te, preproc)
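# A minimal usage sketch (the data directory and its class subfolders are
# hypothetical); returns train/test Iterators plus an ImagePreprocessor:
# (train_batches, test_batches, preproc) = images_from_folder(
#     'data/pets', target_size=(224, 224), color_mode='rgb',
#     train_test_names=['train', 'test'], data_aug=None)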
def images_from_df(train_df,
image_column,
label_columns=[],
directory=None,
val_directory=None,
suffix='',
val_df=None,
is_regression=False,
target_size=(224,224),
color_mode='rgb',
data_aug=None,
val_pct=0.1, random_state=None):
"""
Returns image generator (Iterator instance).
Assumes output will be 2D one-hot-encoded labels for categorization.
Note: This function preprocesses the input in preparation
for a ResNet50 model.
Args:
train_df (DataFrame): pandas dataframe for training dataset
image_column (string): name of column containing the filenames of images
If values in image_column do not have a file extension,
the extension should be supplied with suffix argument.
If values in image_column are not full file paths,
then the path to directory containing images should be supplied
as directory argument.
label_columns(list or str): list or str representing the columns that store labels
Labels can be in any one of the following formats:
1. a single column of string (or integer) labels
image_fname,label
-----------------
image01,cat
image02,dog
2. multiple columns for one-hot-encoded labels
image_fname,cat,dog
image01,1,0
image02,0,1
3. a single column of numeric values for image regression
image_fname,age
-----------------
image01,68
image02,18
directory (string): path to directory containing images
not required if image_column contains full filepaths
val_directory(string): path to directory containing validation images.
only required if validation images are in different folder than train images
suffix(str): will be appended to each entry in image_column
Used when the filenames in image_column do not contain file extensions.
The extension in suffix should include ".".
val_df (DataFrame): pandas dataframe for validation set
is_regression(bool): If True, task is treated as regression.
Used when there is single column of numeric values and
numeric values should be treated as numeric targets as opposed to class labels
target_size (tuple): image dimensions
color_mode (string): color mode
data_aug(ImageDataGenerator): a keras.preprocessing.image.ImageDataGenerator
for data augmentation
val_pct(float): proportion of training data to be used for validation
only used if val_df is None
random_state(int): random seed for train/test split
Returns:
batches: a tuple of two Iterators - one for train and one for test
"""
if isinstance(label_columns, (list, np.ndarray)) and len(label_columns) == 1:
label_columns = label_columns[0]
peek = train_df[label_columns].iloc[0]
if isinstance(label_columns, str) and isinstance(peek, str) and peek.isdigit() and not is_regression:
warnings.warn('Targets are integers, but is_regression=False. Task treated as classification instead of regression.')
if isinstance(peek, str) and is_regression:
train_df[label_columns] = train_df[label_columns].astype('float32')
if val_df is not None:
val_df[label_columns] = val_df[label_columns].astype('float32')
peek = train_df[label_columns].iloc[0]
# get train and test data generators
if directory:
img_folder = directory
else:
img_folder = os.path.dirname(train_df[image_column].iloc[0])
(train_datagen, test_datagen) = process_datagen(data_aug,
train_directory=img_folder,
target_size=target_size,
color_mode=color_mode,
flat_dir=True)
# convert to dataframes
if val_df is None:
if val_pct:
df = train_df.copy()
prop = 1-val_pct
if random_state is not None: np.random.seed(random_state)
msk = np.random.rand(len(df)) < prop
train_df = df[msk]
val_df = df[~msk]
# class names
if isinstance(label_columns, (list, np.ndarray)): label_columns.sort()
# fix file extensions, if necessary
if suffix:
train_df = train_df.copy()
val_df = val_df.copy()
train_df[image_column] = train_df.copy()[image_column].apply(lambda x : x+suffix)
val_df[image_column] = val_df.copy()[image_column].apply(lambda x : x+suffix)
# TODO: replace/standardize with YTransform or YTransformDataFrame
# 1-hot-encode string or integer labels
if isinstance(label_columns, str) or \
(isinstance(label_columns, (list, np.ndarray)) and len(label_columns) == 1):
label_col_name = label_columns if isinstance(label_columns, str) else label_columns[0]
#if not isinstance(df[label_col_name].values[0], str):
#raise ValueError('If a single label column is provided, labels must be in the form of a string.')
if not is_regression:
le = LabelEncoder()
train_labels = train_df[label_col_name].values
le.fit(train_labels)
y_train = to_categorical(le.transform(train_labels))
y_val = to_categorical(le.transform(val_df[label_col_name].values))
y_train_pd = [y_train[:,i] for i in range(y_train.shape[1])]
y_val_pd = [y_val[:,i] for i in range(y_val.shape[1])]
label_columns = list(le.classes_)
train_df = pd.DataFrame(zip(train_df[image_column].values, *y_train_pd), columns=[image_column]+label_columns)
val_df = pd.DataFrame(zip(val_df[image_column].values, *y_val_pd), columns=[image_column]+label_columns)
batches_tr = train_datagen.flow_from_dataframe(
train_df,
directory=directory,
x_col = image_column,
y_col=label_columns,
target_size=target_size,
class_mode='other',
shuffle=True,
interpolation='bicubic',
color_mode = color_mode)
batches_te = None
if val_df is not None:
d = val_directory if val_directory is not None else directory
batches_te = test_datagen.flow_from_dataframe(
val_df,
directory=d,
x_col = image_column,
y_col=label_columns,
target_size=target_size,
class_mode='other',
shuffle=False,
interpolation='bicubic',
color_mode = color_mode)
# setup preprocessor
preproc = ImagePreprocessor(test_datagen,
label_columns,
target_size=target_size,
color_mode=color_mode)
return (batches_tr, batches_te, preproc)
def images_from_csv(train_filepath,
image_column,
label_columns=[],
directory=None,
suffix='',
val_filepath=None,
is_regression=False,
target_size=(224,224),
color_mode='rgb',
data_aug=None,
val_pct=0.1, random_state=None):
"""
Returns image generator (Iterator instance).
Assumes output will be 2D one-hot-encoded labels for categorization.
Note: This function preprocesses the input in preparation
for a ResNet50 model.
Args:
train_filepath (string): path to training dataset in CSV format with header row
image_column (string): name of column containing the filenames of images
If values in image_column do not have a file extension,
the extension should be supplied with suffix argument.
If values in image_column are not full file paths,
then the path to directory containing images should be supplied
as directory argument.
label_columns(list or str): list or str representing the columns that store labels
Labels can be in any one of the following formats:
1. a single column of string (or integer) labels
image_fname,label
-----------------
image01,cat
image02,dog
2. multiple columns for one-hot-encoded labels
image_fname,cat,dog
image01,1,0
image02,0,1
3. a single column of numeric values for image regression
image_fname,age
-----------------
image01,68
image02,18
directory (string): path to directory containing images
not required if image_column contains full filepaths
suffix(str): will be appended to each entry in image_column
Used when the filenames in image_column do not contain file extensions.
The extension in suffix should include ".".
val_filepath (string): path to validation dataset in CSV format
is_regression(bool): If True, task is treated as regression.
Used when there is single column of numeric values and
numeric values should be treated as numeric targets as opposed to class labels
target_size (tuple): image dimensions
color_mode (string): color mode
data_aug(ImageDataGenerator): a keras.preprocessing.image.ImageDataGenerator
for data augmentation
val_pct(float): proportion of training data to be used for validation
only used if val_filepath is None
random_state(int): random seed for train/test split
Returns:
batches: a tuple of two Iterators - one for train and one for test
"""
# convert to dataframes
train_df = pd.read_csv(train_filepath)
val_df = None
if val_filepath is not None:
val_df = pd.read_csv(val_filepath)
return images_from_df(train_df,
image_column,
label_columns=label_columns,
directory=directory,
suffix=suffix,
val_df=val_df,
is_regression=is_regression,
target_size=target_size,
color_mode=color_mode,
data_aug=data_aug,
val_pct=val_pct, random_state=random_state)
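# A minimal usage sketch (file path and column names are hypothetical); the CSV
# is assumed to contain an 'image_fname' column plus one-hot 'cat'/'dog' columns:
# (train_batches, val_batches, preproc) = images_from_csv(
#     'train.csv', 'image_fname', label_columns=['cat', 'dog'],
#     directory='images/', val_pct=0.1)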
def images_from_fname( train_folder,
pattern=r'([^/]+)_\d+.jpg$',
val_folder=None,
is_regression=False,
target_size=(224,224),
color_mode='rgb',
data_aug=None,
val_pct=0.1, random_state=None,
verbose=1):
"""
Returns image generator (Iterator instance).
Args:
train_folder (str): directory containing images
pattern (str): regular expression to extract class from file name of each image
Example: r'([^/]+)_\d+.jpg$' to match 'english_setter' in 'english_setter_140.jpg'
By default, it will extract classes from file names of the form:
<class_name>_<numbers>.jpg
val_folder (str): directory containing validation images. default:None
is_regression(bool): If True, task is treated as regression.
Used when there is single column of numeric values and
numeric values should be treated as numeric targets as opposed to class labels
target_size (tuple): image dimensions
color_mode (string): color mode
data_aug(ImageDataGenerator): a keras.preprocessing.image.ImageDataGenerator
for data augmentation
val_pct(float): proportion of training data to be used for validation
only used if val_folder is None
random_state(int): random seed for train/test split
verbose(bool): verbosity
Returns:
batches: a tuple of two Iterators - one for train and one for test
"""
image_column = 'image_name'
label_column = 'label'
train_df = _img_fnames_to_df(train_folder, pattern,
image_column=image_column, label_column=label_column, verbose=verbose)
val_df = None
if val_folder is not None:
val_df = _img_fnames_to_df(val_folder, pattern,
image_column=image_column, label_column=label_column, verbose=verbose)
return images_from_df(train_df,
image_column,
label_columns=label_column,
directory=train_folder,
val_directory=val_folder,
val_df=val_df,
is_regression=is_regression,
target_size=target_size,
color_mode=color_mode,
data_aug=data_aug,
val_pct=val_pct, random_state=random_state)
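# A minimal usage sketch (folder path is hypothetical); with the default pattern,
# a file named 'english_setter_140.jpg' is assigned the class 'english_setter':
# (train_batches, val_batches, preproc) = images_from_fname(
#     'images/', pattern=r'([^/]+)_\d+.jpg$', val_pct=0.1)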
def _img_fnames_to_df(img_folder,pattern, image_column='image_name', label_column='label', verbose=1):
# get fnames
fnames = []
for ext in ('*.gif', '*.png', '*.jpg'):
fnames.extend(glob.glob(os.path.join(img_folder, ext)))
# process filenames and labels
image_names = []
labels = []
p = re.compile(pattern)
for fname in fnames:
r = p.search(fname)
if r:
image_names.append(os.path.basename(fname))
labels.append(r.group(1))
else:
warnings.warn('Could not extract target for %s - skipping this file'% (fname))
dct = {'image_name': image_names, 'label':labels}
return pd.DataFrame(dct)
# class_names = list(set(labels))
# class_names.sort()
# c2i = {k:v for v,k in enumerate(class_names)}
# labels = [c2i[label] for label in labels]
# labels = to_categorical(labels)
# #class_names = [str(c) in class_names]
# U.vprint('Found %s classes: %s' % (len(class_names), class_names), verbose=verbose)
# U.vprint('y shape: (%s,%s)' % (labels.shape[0], labels.shape[1]), verbose=verbose)
# dct = {'image_name': image_names}
# for i in range(labels.shape[1]):
# dct[class_names[i]] = labels[:,i]
# # convert to dataframes
# df = pd.DataFrame(dct)
# return (df, class_names)
def images_from_array(x_train, y_train,
validation_data=None,
val_pct=0.1,
random_state=None,
data_aug=None,
classes=None,
class_names=None,
is_regression=False):
"""
Returns image generator (Iterator instance) from training
and validation data in the form of NumPy arrays.
This function only supports image classification.
For image regression, please use images_from_df.
Args:
x_train(numpy.ndarray): training data
y_train(numpy.ndarray): labels must either be:
1. one-hot (or multi-hot) encoded arrays
2. integer values representing the label
validation_data (tuple): tuple of numpy.ndarrays for validation data.
labels should be in one of the formats listed above.
val_pct(float): percentage of training data to use for validation if validation_data is None
random_state(int): random state to use for splitting data
data_aug(ImageDataGenerator): a keras.preprocessing.image.ImageDataGenerator
classes(str): old name for class_names - should no longer be used
class_names(str): list of strings to use as class names
is_regression(bool): If True, task is treated as regression.
Used when there is single column of numeric values and
numeric values should be treated as numeric targets as opposed to class labels
Returns:
batches: a tuple of two image.Iterator - one for train and one for test and ImagePreprocessor instance
"""
if classes is not None: raise ValueError('Please use class_names argument instead of "classes".')
if class_names and is_regression:
warnings.warn('is_regression=True, but class_names is not empty. Task treated as regression.')
# TODO: replace/standardize with YTransform
# one-hot-encode if necessary
do_y_transform = False
if np.issubdtype(type(y_train[0]), np.integer) or np.issubdtype(type(y_train[0]), np.floating) or\
(isinstance(y_train[0], (list, np.ndarray)) and len(y_train[0]) == 1):
if not is_regression:
if np.issubdtype(type(y_train[0]), np.integer) and not class_names:
warnings.warn('Targets are integers, but is_regression=False. Task treated as classification instead of regression.')
y_train = to_categorical(y_train)
do_y_transform = True
if validation_data:
x_test = validation_data[0]
y_test = validation_data[1]
if do_y_transform:
y_test = to_categorical(y_test)
elif val_pct is not None and val_pct >0:
x_train, x_test, y_train, y_test = train_test_split(x_train, y_train,
test_size=val_pct,
random_state=random_state)
else:
x_test = None
y_test = None
# set class labels
if not class_names and not is_regression:
class_names = list(map(str, range(len(y_train[0]))))
elif class_names and not is_regression:
assert len(class_names) == len(y_train[0]), \
"Number of classes has to match length of the one-hot encoding"
# train and test data generators
(train_datagen, test_datagen) = process_datagen(data_aug, train_array=x_train)
# Image preprocessor
preproc = ImagePreprocessor(test_datagen, class_names, target_size=None, color_mode=None)
# training data
batches_tr = train_datagen.flow(x_train, y_train, shuffle=True)
# validation data
batches_te = None
if x_test is not None and y_test is not None:
batches_te = test_datagen.flow(x_test, y_test,
shuffle=False)
return (batches_tr, batches_te, preproc)
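# A minimal usage sketch (the arrays are hypothetical): x_train is an
# (N, H, W, C) image array and y_train holds integer labels that are
# one-hot-encoded internally:
# (train_batches, val_batches, preproc) = images_from_array(
#     x_train, y_train, val_pct=0.1, class_names=['cat', 'dog'])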
| preview_data_aug_OLD |
channelnotifier.go | package channelnotifier
import (
"sync"
"github.com/btcsuite/btcd/wire"
"github.com/monasuite/lnd/channeldb"
"github.com/monasuite/lnd/subscribe"
)
// ChannelNotifier is a subsystem which all active, inactive, and closed channel
// events pipe through. It takes subscriptions for its events, and whenever
// it receives a new event it notifies its subscribers over the proper channel.
type ChannelNotifier struct {
started sync.Once
stopped sync.Once
ntfnServer *subscribe.Server
chanDB *channeldb.DB
}
// PendingOpenChannelEvent represents a new event where a new channel has
// entered a pending open state.
type PendingOpenChannelEvent struct {
// ChannelPoint is the channel outpoint for the new channel.
ChannelPoint *wire.OutPoint
// PendingChannel is the channel configuration for the newly created
// channel. This might not have been persisted to the channel DB yet
// because we are still waiting for the final message from the remote
// peer.
PendingChannel *channeldb.OpenChannel
}
// OpenChannelEvent represents a new event where a channel goes from pending
// open to open.
type OpenChannelEvent struct {
// Channel is the channel that has become open.
Channel *channeldb.OpenChannel
}
// ActiveLinkEvent represents a new event where the link becomes active in the
// switch. This happens before the ActiveChannelEvent.
type ActiveLinkEvent struct {
// ChannelPoint is the channel point for the newly active channel.
ChannelPoint *wire.OutPoint
}
// ActiveChannelEvent represents a new event where a channel becomes active.
type ActiveChannelEvent struct {
// ChannelPoint is the channelpoint for the newly active channel.
ChannelPoint *wire.OutPoint
}
// InactiveChannelEvent represents a new event where a channel becomes inactive.
type InactiveChannelEvent struct {
// ChannelPoint is the channelpoint for the newly inactive channel.
ChannelPoint *wire.OutPoint
}
// ClosedChannelEvent represents a new event where a channel becomes closed.
type ClosedChannelEvent struct {
// CloseSummary is the summary of the channel close that has occurred.
CloseSummary *channeldb.ChannelCloseSummary
}
// New creates a new channel notifier. The ChannelNotifier gets channel
// events from peers and from the chain arbitrator, and dispatches them to
// its clients.
func New(chanDB *channeldb.DB) *ChannelNotifier |
// Start starts the ChannelNotifier and all goroutines it needs to carry out its task.
func (c *ChannelNotifier) Start() error {
var err error
c.started.Do(func() {
log.Trace("ChannelNotifier starting")
err = c.ntfnServer.Start()
})
return err
}
// Stop signals the notifier for a graceful shutdown.
func (c *ChannelNotifier) Stop() error {
var err error
c.stopped.Do(func() {
err = c.ntfnServer.Stop()
})
return err
}
// SubscribeChannelEvents returns a subscribe.Client that will receive updates
// any time the Server is made aware of a new event. The subscription provides
// channel events from the point of subscription onwards.
//
// TODO(carlaKC): update to allow subscriptions to specify a block height from
// which we would like to subscribe to events.
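//
// A caller-side usage sketch (assumes the returned subscribe.Client exposes
// Updates() and Cancel(), as in lnd's subscribe package):
//
//	client, err := notifier.SubscribeChannelEvents()
//	if err != nil {
//		return err
//	}
//	defer client.Cancel()
//	for update := range client.Updates() {
//		switch e := update.(type) {
//		case channelnotifier.OpenChannelEvent:
//			// a channel went from pending open to open: e.Channel
//		case channelnotifier.ClosedChannelEvent:
//			// a channel was closed: e.CloseSummary
//		}
//	}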
func (c *ChannelNotifier) SubscribeChannelEvents() (*subscribe.Client, error) {
return c.ntfnServer.Subscribe()
}
// NotifyPendingOpenChannelEvent notifies the channelEventNotifier goroutine
// that a new channel is pending. The pending channel is passed as a parameter
// instead of read from the database because it might not yet have been
// persisted to the DB because we still wait for the final message from the
// remote peer.
func (c *ChannelNotifier) NotifyPendingOpenChannelEvent(chanPoint wire.OutPoint,
pendingChan *channeldb.OpenChannel) {
event := PendingOpenChannelEvent{
ChannelPoint: &chanPoint,
PendingChannel: pendingChan,
}
if err := c.ntfnServer.SendUpdate(event); err != nil {
log.Warnf("Unable to send pending open channel update: %v", err)
}
}
// NotifyOpenChannelEvent notifies the channelEventNotifier goroutine that a
// channel has gone from pending open to open.
func (c *ChannelNotifier) NotifyOpenChannelEvent(chanPoint wire.OutPoint) {
// Fetch the relevant channel from the database.
channel, err := c.chanDB.FetchChannel(chanPoint)
if err != nil {
log.Warnf("Unable to fetch open channel from the db: %v", err)
}
// Send the open event to all channel event subscribers.
event := OpenChannelEvent{Channel: channel}
if err := c.ntfnServer.SendUpdate(event); err != nil {
log.Warnf("Unable to send open channel update: %v", err)
}
}
// NotifyClosedChannelEvent notifies the channelEventNotifier goroutine that a
// channel has closed.
func (c *ChannelNotifier) NotifyClosedChannelEvent(chanPoint wire.OutPoint) {
// Fetch the relevant closed channel from the database.
closeSummary, err := c.chanDB.FetchClosedChannel(&chanPoint)
if err != nil {
log.Warnf("Unable to fetch closed channel summary from the db: %v", err)
}
// Send the closed event to all channel event subscribers.
event := ClosedChannelEvent{CloseSummary: closeSummary}
if err := c.ntfnServer.SendUpdate(event); err != nil {
log.Warnf("Unable to send closed channel update: %v", err)
}
}
// NotifyActiveLinkEvent notifies the channelEventNotifier goroutine that a
// link has been added to the switch.
func (c *ChannelNotifier) NotifyActiveLinkEvent(chanPoint wire.OutPoint) {
event := ActiveLinkEvent{ChannelPoint: &chanPoint}
if err := c.ntfnServer.SendUpdate(event); err != nil {
log.Warnf("Unable to send active link update: %v", err)
}
}
// NotifyActiveChannelEvent notifies the channelEventNotifier goroutine that a
// channel is active.
func (c *ChannelNotifier) NotifyActiveChannelEvent(chanPoint wire.OutPoint) {
event := ActiveChannelEvent{ChannelPoint: &chanPoint}
if err := c.ntfnServer.SendUpdate(event); err != nil {
log.Warnf("Unable to send active channel update: %v", err)
}
}
// NotifyInactiveChannelEvent notifies the channelEventNotifier goroutine that a
// channel is inactive.
func (c *ChannelNotifier) NotifyInactiveChannelEvent(chanPoint wire.OutPoint) {
event := InactiveChannelEvent{ChannelPoint: &chanPoint}
if err := c.ntfnServer.SendUpdate(event); err != nil {
log.Warnf("Unable to send inactive channel update: %v", err)
}
}
| {
return &ChannelNotifier{
ntfnServer: subscribe.NewServer(),
chanDB: chanDB,
}
} |
int.rs | use std::io::Write;
use std::mem::size_of;
use super::Options;
use de::read::BincodeRead;
use error::{ErrorKind, Result};
pub trait IntEncoding {
/// Gets the size (in bytes) that a value would be serialized to.
fn u16_size(n: u16) -> u64;
/// Gets the size (in bytes) that a value would be serialized to.
fn u32_size(n: u32) -> u64;
/// Gets the size (in bytes) that a value would be serialized to.
fn u64_size(n: u64) -> u64;
/// Gets the size (in bytes) that a value would be serialized to.
fn i16_size(n: i16) -> u64;
/// Gets the size (in bytes) that a value would be serialized to.
fn i32_size(n: i32) -> u64;
/// Gets the size (in bytes) that a value would be serialized to.
fn i64_size(n: i64) -> u64;
#[inline(always)]
fn len_size(len: usize) -> u64 {
Self::u64_size(len as u64)
}
/// Serializes a sequence length.
#[inline(always)]
fn serialize_len<W: Write, O: Options>(
ser: &mut ::ser::Serializer<W, O>,
len: usize,
) -> Result<()> {
Self::serialize_u64(ser, len as u64)
}
fn serialize_u16<W: Write, O: Options>(
ser: &mut ::ser::Serializer<W, O>,
val: u16,
) -> Result<()>;
fn serialize_u32<W: Write, O: Options>(
ser: &mut ::ser::Serializer<W, O>,
val: u32,
) -> Result<()>;
fn serialize_u64<W: Write, O: Options>(
ser: &mut ::ser::Serializer<W, O>,
val: u64,
) -> Result<()>;
fn serialize_i16<W: Write, O: Options>(
ser: &mut ::ser::Serializer<W, O>,
val: i16,
) -> Result<()>;
fn serialize_i32<W: Write, O: Options>(
ser: &mut ::ser::Serializer<W, O>,
val: i32,
) -> Result<()>;
fn serialize_i64<W: Write, O: Options>(
ser: &mut ::ser::Serializer<W, O>,
val: i64,
) -> Result<()>;
/// Deserializes a sequence length.
#[inline(always)]
fn deserialize_len<'de, R: BincodeRead<'de>, O: Options>(
de: &mut ::de::Deserializer<R, O>,
) -> Result<usize> {
Self::deserialize_u64(de).and_then(cast_u64_to_usize)
}
fn deserialize_u16<'de, R: BincodeRead<'de>, O: Options>(
de: &mut ::de::Deserializer<R, O>,
) -> Result<u16>;
fn deserialize_u32<'de, R: BincodeRead<'de>, O: Options>(
de: &mut ::de::Deserializer<R, O>,
) -> Result<u32>;
fn deserialize_u64<'de, R: BincodeRead<'de>, O: Options>(
de: &mut ::de::Deserializer<R, O>,
) -> Result<u64>;
fn deserialize_i16<'de, R: BincodeRead<'de>, O: Options>(
de: &mut ::de::Deserializer<R, O>,
) -> Result<i16>;
fn deserialize_i32<'de, R: BincodeRead<'de>, O: Options>(
de: &mut ::de::Deserializer<R, O>,
) -> Result<i32>;
fn deserialize_i64<'de, R: BincodeRead<'de>, O: Options>(
de: &mut ::de::Deserializer<R, O>,
) -> Result<i64>;
serde_if_integer128! {
fn u128_size(v: u128) -> u64;
fn i128_size(v: i128) -> u64;
fn serialize_u128<W: Write, O: Options>(
ser: &mut ::Serializer<W, O>,
val: u128,
) -> Result<()>;
fn deserialize_u128<'de, R: BincodeRead<'de>, O: Options>(
de: &mut ::Deserializer<R, O>,
) -> Result<u128>;
fn serialize_i128<W: Write, O: Options>(
ser: &mut ::Serializer<W, O>,
val: i128,
) -> Result<()>;
fn deserialize_i128<'de, R: BincodeRead<'de>, O: Options>(
de: &mut ::Deserializer<R, O>,
) -> Result<i128>;
}
}
/// Fixed-size integer encoding.
///
/// * Fixed size integers are encoded directly
/// * Enum discriminants are encoded as u32
/// * Lengths and usize are encoded as u64
#[derive(Copy, Clone)]
pub struct FixintEncoding;
/// Variable-size integer encoding (excepting [ui]8).
///
/// Encoding an unsigned integer u (of any type excepting u8) works as follows:
///
/// 1. If `u < 251`, encode it as a single byte with that value.
/// 2. If `251 <= u < 2**16`, encode it as a literal byte 251, followed by a u16 with value `u`.
/// 3. If `2**16 <= u < 2**32`, encode it as a literal byte 252, followed by a u32 with value `u`.
/// 4. If `2**32 <= u < 2**64`, encode it as a literal byte 253, followed by a u64 with value `u`.
/// 5. If `2**64 <= u < 2**128`, encode it as a literal byte 254, followed by a
/// u128 with value `u`.
///
/// Then, for signed integers, we first convert to unsigned using the zigzag algorithm,
/// and then encode them as we do for unsigned integers generally. The reason we use this
/// algorithm is that it encodes those values which are close to zero in fewer bytes; the
/// obvious algorithm, where we encode the cast values, gives a very large encoding for all
/// negative values.
///
/// The zigzag algorithm is defined as follows:
///
/// ```ignore
/// fn zigzag(v: Signed) -> Unsigned {
/// match v {
/// 0 => 0,
/// v if v < 0 => |v| * 2 - 1
/// v if v > 0 => v * 2
/// }
/// }
/// ```
///
/// And works such that:
///
/// ```ignore
/// assert_eq!(zigzag(0), 0);
/// assert_eq!(zigzag(-1), 1);
/// assert_eq!(zigzag(1), 2);
/// assert_eq!(zigzag(-2), 3);
/// assert_eq!(zigzag(2), 4);
/// assert_eq!(zigzag(i64::min_value()), u64::max_value());
/// ```
///
/// Note that u256 and the like are unsupported by this format; if and when they are added to the
/// language, they may be supported via the extension point given by the 255 byte.
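///
/// As a worked sketch (assuming the configured byte order for the literal is
/// little-endian), the value `300u64` falls into case 2 above, so it is written
/// as the marker byte `251` followed by the two little-endian bytes of `300u16`,
/// i.e. `[251, 44, 1]`.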
#[derive(Copy, Clone)]
pub struct VarintEncoding;
const SINGLE_BYTE_MAX: u8 = 250;
const U16_BYTE: u8 = 251;
const U32_BYTE: u8 = 252;
const U64_BYTE: u8 = 253;
const U128_BYTE: u8 = 254;
const DESERIALIZE_EXTENSION_POINT_ERR: &str = r#"
Byte 255 is treated as an extension point; it should not be encoding anything.
Do you have a mismatched bincode version or configuration?
"#;
impl VarintEncoding {
fn varint_size(n: u64) -> u64 {
if n <= SINGLE_BYTE_MAX as u64 {
1
} else if n <= u16::max_value() as u64 {
(1 + size_of::<u16>()) as u64
} else if n <= u32::max_value() as u64 {
(1 + size_of::<u32>()) as u64
} else {
(1 + size_of::<u64>()) as u64
}
}
#[inline(always)]
fn zigzag_encode(n: i64) -> u64 {
if n < 0 {
// let's avoid the edge case of i64::min_value()
// !n is equal to `-n - 1`, so this is:
// !n * 2 + 1 = 2(-n - 1) + 1 = -2n - 2 + 1 = -2n - 1
!(n as u64) * 2 + 1
} else {
(n as u64) * 2
}
}
#[inline(always)]
fn zigzag_decode(n: u64) -> i64 {
if n % 2 == 0 {
// positive number
(n / 2) as i64
} else {
// negative number
// !m * 2 + 1 = n
// !m * 2 = n - 1
// !m = (n - 1) / 2
// m = !((n - 1) / 2)
// since we have n is odd, we have floor(n / 2) = floor((n - 1) / 2)
!(n / 2) as i64
}
}
fn serialize_varint<W: Write, O: Options>(
ser: &mut ::ser::Serializer<W, O>,
n: u64,
) -> Result<()> {
if n <= SINGLE_BYTE_MAX as u64 {
ser.serialize_byte(n as u8)
} else if n <= u16::max_value() as u64 {
ser.serialize_byte(U16_BYTE)?;
ser.serialize_literal_u16(n as u16)
} else if n <= u32::max_value() as u64 {
ser.serialize_byte(U32_BYTE)?;
ser.serialize_literal_u32(n as u32)
} else {
ser.serialize_byte(U64_BYTE)?;
ser.serialize_literal_u64(n as u64)
}
}
fn deserialize_varint<'de, R: BincodeRead<'de>, O: Options>(
de: &mut ::de::Deserializer<R, O>,
) -> Result<u64> {
#[allow(ellipsis_inclusive_range_patterns)]
match de.deserialize_byte()? {
byte @ 0...SINGLE_BYTE_MAX => Ok(byte as u64),
U16_BYTE => Ok(de.deserialize_literal_u16()? as u64),
U32_BYTE => Ok(de.deserialize_literal_u32()? as u64),
U64_BYTE => de.deserialize_literal_u64(),
U128_BYTE => Err(Box::new(ErrorKind::Custom(
"Invalid value (u128 range): you may have a version or configuration disagreement?"
.to_string(),
))),
_ => Err(Box::new(ErrorKind::Custom(
DESERIALIZE_EXTENSION_POINT_ERR.to_string(),
))),
}
}
serde_if_integer128! {
// see zigzag_encode and zigzag_decode for implementation comments
#[inline(always)]
fn zigzag128_encode(n: i128) -> u128 {
if n < 0 {
!(n as u128) * 2 + 1
} else {
(n as u128) * 2
}
}
#[inline(always)]
fn zigzag128_decode(n: u128) -> i128 {
if n % 2 == 0 {
(n / 2) as i128
} else {
!(n / 2) as i128
}
}
fn varint128_size(n: u128) -> u64 {
if n <= SINGLE_BYTE_MAX as u128 {
1
} else if n <= u16::max_value() as u128 {
(1 + size_of::<u16>()) as u64
} else if n <= u32::max_value() as u128 {
(1 + size_of::<u32>()) as u64
} else if n <= u64::max_value() as u128 {
(1 + size_of::<u64>()) as u64
} else {
(1 + size_of::<u128>()) as u64
}
}
fn serialize_varint128<W: Write, O: Options>(
ser: &mut ::ser::Serializer<W, O>,
n: u128,
) -> Result<()> {
if n <= SINGLE_BYTE_MAX as u128 {
ser.serialize_byte(n as u8)
} else if n <= u16::max_value() as u128 {
ser.serialize_byte(U16_BYTE)?;
ser.serialize_literal_u16(n as u16)
} else if n <= u32::max_value() as u128 {
ser.serialize_byte(U32_BYTE)?;
ser.serialize_literal_u32(n as u32)
} else if n <= u64::max_value() as u128 {
ser.serialize_byte(U64_BYTE)?;
ser.serialize_literal_u64(n as u64)
} else {
ser.serialize_byte(U128_BYTE)?;
ser.serialize_literal_u128(n)
}
}
fn deserialize_varint128<'de, R: BincodeRead<'de>, O: Options>(
de: &mut ::de::Deserializer<R, O>,
) -> Result<u128> {
#[allow(ellipsis_inclusive_range_patterns)]
match de.deserialize_byte()? {
byte @ 0...SINGLE_BYTE_MAX => Ok(byte as u128),
U16_BYTE => Ok(de.deserialize_literal_u16()? as u128),
U32_BYTE => Ok(de.deserialize_literal_u32()? as u128),
U64_BYTE => Ok(de.deserialize_literal_u64()? as u128),
U128_BYTE => de.deserialize_literal_u128(),
_ => Err(Box::new(ErrorKind::Custom(DESERIALIZE_EXTENSION_POINT_ERR.to_string()))),
}
}
}
}
impl IntEncoding for FixintEncoding {
#[inline(always)]
fn u16_size(_: u16) -> u64 {
size_of::<u16>() as u64
}
#[inline(always)]
fn u32_size(_: u32) -> u64 {
size_of::<u32>() as u64
}
#[inline(always)]
fn u64_size(_: u64) -> u64 {
size_of::<u64>() as u64
}
#[inline(always)]
fn i16_size(_: i16) -> u64 {
size_of::<i16>() as u64
}
#[inline(always)]
fn i32_size(_: i32) -> u64 {
size_of::<i32>() as u64
}
#[inline(always)]
fn i64_size(_: i64) -> u64 {
size_of::<i64>() as u64
}
#[inline(always)]
fn serialize_u16<W: Write, O: Options>(ser: &mut ::Serializer<W, O>, val: u16) -> Result<()> {
ser.serialize_literal_u16(val)
}
#[inline(always)]
fn serialize_u32<W: Write, O: Options>(ser: &mut ::Serializer<W, O>, val: u32) -> Result<()> {
ser.serialize_literal_u32(val)
}
#[inline(always)]
fn serialize_u64<W: Write, O: Options>(ser: &mut ::Serializer<W, O>, val: u64) -> Result<()> {
ser.serialize_literal_u64(val)
}
#[inline(always)]
fn serialize_i16<W: Write, O: Options>(ser: &mut ::Serializer<W, O>, val: i16) -> Result<()> |
#[inline(always)]
fn serialize_i32<W: Write, O: Options>(ser: &mut ::Serializer<W, O>, val: i32) -> Result<()> {
ser.serialize_literal_u32(val as u32)
}
#[inline(always)]
fn serialize_i64<W: Write, O: Options>(ser: &mut ::Serializer<W, O>, val: i64) -> Result<()> {
ser.serialize_literal_u64(val as u64)
}
#[inline(always)]
fn deserialize_u16<'de, R: BincodeRead<'de>, O: Options>(
de: &mut ::Deserializer<R, O>,
) -> Result<u16> {
de.deserialize_literal_u16()
}
#[inline(always)]
fn deserialize_u32<'de, R: BincodeRead<'de>, O: Options>(
de: &mut ::Deserializer<R, O>,
) -> Result<u32> {
de.deserialize_literal_u32()
}
#[inline(always)]
fn deserialize_u64<'de, R: BincodeRead<'de>, O: Options>(
de: &mut ::Deserializer<R, O>,
) -> Result<u64> {
de.deserialize_literal_u64()
}
#[inline(always)]
fn deserialize_i16<'de, R: BincodeRead<'de>, O: Options>(
de: &mut ::Deserializer<R, O>,
) -> Result<i16> {
Ok(de.deserialize_literal_u16()? as i16)
}
#[inline(always)]
fn deserialize_i32<'de, R: BincodeRead<'de>, O: Options>(
de: &mut ::Deserializer<R, O>,
) -> Result<i32> {
Ok(de.deserialize_literal_u32()? as i32)
}
#[inline(always)]
fn deserialize_i64<'de, R: BincodeRead<'de>, O: Options>(
de: &mut ::Deserializer<R, O>,
) -> Result<i64> {
Ok(de.deserialize_literal_u64()? as i64)
}
serde_if_integer128! {
#[inline(always)]
fn u128_size(_: u128) -> u64{
size_of::<u128>() as u64
}
#[inline(always)]
fn i128_size(_: i128) -> u64{
size_of::<i128>() as u64
}
#[inline(always)]
fn serialize_u128<W: Write, O: Options>(
ser: &mut ::Serializer<W, O>,
val: u128,
) -> Result<()> {
ser.serialize_literal_u128(val)
}
#[inline(always)]
fn serialize_i128<W: Write, O: Options>(
ser: &mut ::Serializer<W, O>,
val: i128,
) -> Result<()> {
ser.serialize_literal_u128(val as u128)
}
#[inline(always)]
fn deserialize_u128<'de, R: BincodeRead<'de>, O: Options>(
de: &mut ::Deserializer<R, O>,
) -> Result<u128> {
de.deserialize_literal_u128()
}
#[inline(always)]
fn deserialize_i128<'de, R: BincodeRead<'de>, O: Options>(
de: &mut ::Deserializer<R, O>,
) -> Result<i128> {
Ok(de.deserialize_literal_u128()? as i128)
}
}
}
impl IntEncoding for VarintEncoding {
#[inline(always)]
fn u16_size(n: u16) -> u64 {
Self::varint_size(n as u64)
}
#[inline(always)]
fn u32_size(n: u32) -> u64 {
Self::varint_size(n as u64)
}
#[inline(always)]
fn u64_size(n: u64) -> u64 {
Self::varint_size(n)
}
#[inline(always)]
fn i16_size(n: i16) -> u64 {
Self::varint_size(Self::zigzag_encode(n as i64))
}
#[inline(always)]
fn i32_size(n: i32) -> u64 {
Self::varint_size(Self::zigzag_encode(n as i64))
}
#[inline(always)]
fn i64_size(n: i64) -> u64 {
Self::varint_size(Self::zigzag_encode(n))
}
#[inline(always)]
fn serialize_u16<W: Write, O: Options>(ser: &mut ::Serializer<W, O>, val: u16) -> Result<()> {
Self::serialize_varint(ser, val as u64)
}
#[inline(always)]
fn serialize_u32<W: Write, O: Options>(ser: &mut ::Serializer<W, O>, val: u32) -> Result<()> {
Self::serialize_varint(ser, val as u64)
}
#[inline(always)]
fn serialize_u64<W: Write, O: Options>(ser: &mut ::Serializer<W, O>, val: u64) -> Result<()> {
Self::serialize_varint(ser, val)
}
#[inline(always)]
fn serialize_i16<W: Write, O: Options>(ser: &mut ::Serializer<W, O>, val: i16) -> Result<()> {
Self::serialize_varint(ser, Self::zigzag_encode(val as i64))
}
#[inline(always)]
fn serialize_i32<W: Write, O: Options>(ser: &mut ::Serializer<W, O>, val: i32) -> Result<()> {
Self::serialize_varint(ser, Self::zigzag_encode(val as i64))
}
#[inline(always)]
fn serialize_i64<W: Write, O: Options>(ser: &mut ::Serializer<W, O>, val: i64) -> Result<()> {
Self::serialize_varint(ser, Self::zigzag_encode(val))
}
#[inline(always)]
fn deserialize_u16<'de, R: BincodeRead<'de>, O: Options>(
de: &mut ::Deserializer<R, O>,
) -> Result<u16> {
Self::deserialize_varint(de).and_then(cast_u64_to_u16)
}
#[inline(always)]
fn deserialize_u32<'de, R: BincodeRead<'de>, O: Options>(
de: &mut ::Deserializer<R, O>,
) -> Result<u32> {
Self::deserialize_varint(de).and_then(cast_u64_to_u32)
}
#[inline(always)]
fn deserialize_u64<'de, R: BincodeRead<'de>, O: Options>(
de: &mut ::Deserializer<R, O>,
) -> Result<u64> {
Self::deserialize_varint(de)
}
#[inline(always)]
fn deserialize_i16<'de, R: BincodeRead<'de>, O: Options>(
de: &mut ::Deserializer<R, O>,
) -> Result<i16> {
Self::deserialize_varint(de)
.map(Self::zigzag_decode)
.and_then(cast_i64_to_i16)
}
#[inline(always)]
fn deserialize_i32<'de, R: BincodeRead<'de>, O: Options>(
de: &mut ::Deserializer<R, O>,
) -> Result<i32> {
Self::deserialize_varint(de)
.map(Self::zigzag_decode)
.and_then(cast_i64_to_i32)
}
#[inline(always)]
fn deserialize_i64<'de, R: BincodeRead<'de>, O: Options>(
de: &mut ::Deserializer<R, O>,
) -> Result<i64> {
Self::deserialize_varint(de).map(Self::zigzag_decode)
}
serde_if_integer128! {
#[inline(always)]
fn u128_size(n: u128) -> u64 {
Self::varint128_size(n)
}
#[inline(always)]
fn i128_size(n: i128) -> u64 {
Self::varint128_size(Self::zigzag128_encode(n))
}
#[inline(always)]
fn serialize_u128<W: Write, O: Options>(
ser: &mut ::Serializer<W, O>,
val: u128,
) -> Result<()> {
Self::serialize_varint128(ser, val)
}
#[inline(always)]
fn serialize_i128<W: Write, O: Options>(
ser: &mut ::Serializer<W, O>,
val: i128,
) -> Result<()> {
Self::serialize_varint128(ser, Self::zigzag128_encode(val))
}
#[inline(always)]
fn deserialize_u128<'de, R: BincodeRead<'de>, O: Options>(
de: &mut ::Deserializer<R, O>,
) -> Result<u128> {
Self::deserialize_varint128(de)
}
#[inline(always)]
fn deserialize_i128<'de, R: BincodeRead<'de>, O: Options>(
de: &mut ::Deserializer<R, O>,
) -> Result<i128> {
Self::deserialize_varint128(de).map(Self::zigzag128_decode)
}
}
}
fn cast_u64_to_usize(n: u64) -> Result<usize> {
if n <= usize::max_value() as u64 {
Ok(n as usize)
} else {
Err(Box::new(ErrorKind::Custom(format!(
"Invalid size {}: sizes must fit in a usize (0 to {})",
n,
usize::max_value()
))))
}
}
fn cast_u64_to_u32(n: u64) -> Result<u32> {
if n <= u32::max_value() as u64 {
Ok(n as u32)
} else {
Err(Box::new(ErrorKind::Custom(format!(
"Invalid u32 {}: you may have a version disagreement?",
n,
))))
}
}
fn cast_u64_to_u16(n: u64) -> Result<u16> {
if n <= u16::max_value() as u64 {
Ok(n as u16)
} else {
Err(Box::new(ErrorKind::Custom(format!(
"Invalid u16 {}: you may have a version disagreement?",
n,
))))
}
}
fn cast_i64_to_i32(n: i64) -> Result<i32> {
if n <= i32::max_value() as i64 && n >= i32::min_value() as i64 {
Ok(n as i32)
} else {
Err(Box::new(ErrorKind::Custom(format!(
"Invalid i32 {}: you may have a version disagreement?",
n,
))))
}
}
fn cast_i64_to_i16(n: i64) -> Result<i16> {
if n <= i16::max_value() as i64 && n >= i16::min_value() as i64 {
Ok(n as i16)
} else {
Err(Box::new(ErrorKind::Custom(format!(
"Invalid i16 {}: you may have a version disagreement?",
n,
))))
}
}
#[cfg(test)]
mod test {
use super::VarintEncoding;
#[test]
fn test_zigzag_encode() {
let zigzag = VarintEncoding::zigzag_encode;
assert_eq!(zigzag(0), 0);
for x in 1..512 {
assert_eq!(zigzag(x), (x as u64) * 2);
assert_eq!(zigzag(-x), (x as u64) * 2 - 1);
}
}
#[test]
fn test_zigzag_decode() {
// zigzag'
let zigzagp = VarintEncoding::zigzag_decode;
for x in (0..512).map(|x| x * 2) {
assert_eq!(zigzagp(x), x as i64 / 2);
assert_eq!(zigzagp(x + 1), -(x as i64) / 2 - 1);
}
}
#[test]
fn test_zigzag_edge_cases() {
let (zigzag, zigzagp) = (VarintEncoding::zigzag_encode, VarintEncoding::zigzag_decode);
assert_eq!(zigzag(i64::max_value()), u64::max_value() - 1);
assert_eq!(zigzag(i64::min_value()), u64::max_value());
assert_eq!(zigzagp(u64::max_value() - 1), i64::max_value());
assert_eq!(zigzagp(u64::max_value()), i64::min_value());
}
}
| {
ser.serialize_literal_u16(val as u16)
} |
interfaces.ts | namespace?: string;
name?: string;
} | // IPanelOptions is the interface for the options for a users panel used within a dashboard. It contains all required
// fields to reference a team.
export interface IPanelOptions {
cluster?: string; |
|
arrow_down.rs | use seed::{prelude::*, *};
use super::{outline_trait_private::OutlinePrivate, Outline};
pub struct ArrowDown;
impl OutlinePrivate for ArrowDown {
fn base<T>(classes: impl ToClasses) -> Node<T> {
svg![
C![classes],
attrs!(
At::from("fill") => "none",
At::from("stroke") => "currentColor",
At::from("viewBox") => "0 0 24 24",
),
path![attrs!(
At::from("d") => "M19 14l-7 7m0 0l-7-7m7 7V3",
At::from("stroke-linecap") => "round",
At::from("stroke-linejoin") => "round", | ]
}
}
impl Outline for ArrowDown {} | At::from("stroke-width") => "2",
),], |
analytics.py | # Armin Pourshafeie
#TODO write a generator that takes the chromosome and spits out data. do the regression in parallel
#TODO documentation
# Running the gwas
import logging
import numpy as np
import gzip, h5py, os, re, gc, tqdm
from sklearn.linear_model import LogisticRegression
import statsmodels.formula.api as smf
from statsmodels.tools.tools import add_constant
from functools import partial
from pathos.multiprocessing import ProcessingPool as Pool
import sklearn.decomposition as decomp
from scipy.linalg import svd
from scipy.stats import chi2
from scipy.sparse.linalg import eigsh as eig
import mkl
from optimizationAux import *
from plinkio import plinkfile
# Careful here, eigh uses https://software.intel.com/en-us/mkl-developer-reference-c-syevr under the hood
# so it can be significantly slower
from numpy.core import _methods
from sklearn.utils.extmath import randomized_svd, svd_flip
import time, sys
from corr import nancorr, corr, HweP
from numpy.linalg import inv as inverse
from numpy.core import umath as um
from numpy import mean, isnan
from sklearn.metrics import log_loss
#from numpy.core import umath as um
#umr_maximum = um.maximum.reduce
umr_sum = um.add.reduce
maximum = np.maximum
add = np.add
_mean = _methods._mean
_sum = _methods._sum
sub = np.subtract
div = np.divide
chi2sf = chi2.sf
sqrt = np.sqrt
mean = np.mean
kExactTestBias = 0.00000000000000000000000010339757656912845935892608650874535669572651386260986328125;
kSmallEpsilon = 0.00000000000005684341886080801486968994140625;
kLargeEpsilon = 1e-7
class DO(object):
"""This object represents each data owner. It can compute statistics in a
centralized manner on its own data, or if it has a central hub associated with it, it can
communicate with the center"""
def __init__(self, store_name, center=None):
self.store_name = store_name
self.center = center
with h5py.File(self.store_name) as store:
self.has_local_AF = ('has_local_AF' in store.attrs and
store.attrs['has_local_AF'])
self.normalized = ('normalized' in store.attrs and
store.attrs['normalized'])
self.n = store['meta/Status'].shape[0]
self.current_X = None
self.current_Y = None
self.load_snp = True
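# A minimal usage sketch (the HDF5 store path is hypothetical and the file is
# assumed to follow the chromosome/SNP layout this class expects):
# do = DO('cohort1.h5')
# do.compute_local_AF()
# do.impute()
# num_snps = do.count()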
def clear_tmp(self):
self.current_X = None
self.current_Y = None
def clear_tmpX(self):
self.current_X = None
self.X = None
def clear_tmpY(self):
self.current_Y = None
def count(self, exclude=['meta']):
with h5py.File(self.store_name, 'r') as store:
chroms = [ chrom for chrom in store.keys() if chrom not in exclude ]
c = 0
for chrom in chroms:
c += len(store[chrom].keys())
self.p = c
return c
def likelihood(self, beta, verbose=False):
"""log loss. If beta is a matrix. Verbose refers to when beta is a matrix and not just a vector"""
y_model= 1.0 / (1 + np.exp(-self.X.dot(beta)))
if not verbose:
return log_loss((self.current_Y+1)/2, y_model, normalize=False, labels=[0,1])
else:
return np.array([log_loss((self.current_Y+1)/2, y_pred, normalize=False, labels=[0,1]) for y_pred in y_model.T])
def local_missing_filter(self, t_missing):
n = float(self.n)
def _filter(name, node):
if isinstance(node, h5py.Dataset) and node.parent.name != '/meta':
if node.attrs['local_missing']/n > t_missing:
node.attrs['local_filter'] = True
with h5py.File(self.store_name, 'a') as f:
f.visititems(_filter)
def local_AF_filter(self, t_AF):
def _filter(name, node):
if isinstance(node, h5py.Dataset) and node.parent.name != '/meta':
if 'local_filter' in node.attrs and node.attrs['local_filter']:
# already filtered
return
local_AF = node.attrs['local_AF']
if local_AF + kLargeEpsilon < t_AF or local_AF - kLargeEpsilon > (1-t_AF):
node.attrs['local_filter'] = True
with h5py.File(self.store_name, 'a') as f:
f.visititems(_filter)
def local_HWE_filter(self, t_hwe):
def _filter(name, node):
if isinstance(node, h5py.Dataset) and node.parent.name != '/meta':
if 'local_filter' not in node.attrs or not node.attrs['local_filter']:
v = node.value
vhet = np.sum(v==1)
vhr = np.sum(v==0)
vha = np.sum(v==2)
hwe = HweP(vhet, vhr, vha, 0)
if hwe < t_hwe:
node.attrs['local_filter'] = True
with h5py.File(self.store_name, 'a') as f:
f.visititems(_filter)
def local_LD_filter(self, t_ld, win_sz, step_sz=None):
def pruner(chrom, threshold, window):
window.shape = (1, window.shape[0])
to_delete = set()
n = window.shape[1]
sumLinT, sumSqT, crossT = self.corr_data([chrom], window)
MAF = self.get_MAF(chrom, window[0], global_freq=False)
corrT = corr(sumLinT, sumSqT, crossT)
while (1):
for i, snp1 in enumerate(window[0,:]):
if snp1 in to_delete:
continue
else:
for j in range(i+1, n):
if window[0][j] in to_delete:
continue
elif corrT[i,j]**2 > t_ld:
if MAF[i] > MAF[j] * (1.0 + kLargeEpsilon): #somewhat similar to what plink does
to_delete.add(snp1)
else:
to_delete.add(window[0][j])
break
remaining = np.array([i for i,snp in enumerate(window[0]) if snp not in to_delete])
r2 = corrT[remaining,:][:,remaining]
if np.max(r2**2) < t_ld:
break
return to_delete
if step_sz is None:
step_sz = int(win_sz/2)
with h5py.File(self.store_name, 'a') as f:
for chrom in f.keys():
if chrom == 'meta':
continue
# Get snps that pass the allele frequency threshold
dset = f[chrom]
allsnps = np.array(self.snps_present(chrom))
snps = np.sort(np.array([int(snp) for snp in allsnps if ('local_filter'
not in dset[snp].attrs or not dset[snp].attrs['local_filter'])]))
del allsnps
win_sz = min(snps.shape[0], win_sz)
finished, winstart = False, 0
highLD, to_delete = set(), set()
while not finished:
winend = winstart + win_sz
if winend >= len(snps):
finished = True
winend = len(snps)
window = snps[winstart:winend] #preliminary window
window = np.sort(np.array(list(set(window) - to_delete)))#[:win_sz]
to_delete = pruner(chrom, t_ld, window)
highLD = highLD.union(to_delete)
winstart += step_sz
# Mark highLD
for snp in highLD:
dset[str(snp)].attrs['local_filter'] = True
def clean_by_local_filter(self,chrom=None, keepset=set()):
with h5py.File(self.store_name, 'a') as f:
if chrom is None:
for chrom in f.keys():
if chrom != 'meta':
dset = f[chrom]
for snp in dset:
if 'local_filter' in dset[snp].attrs:
del dset[snp]
else:
dset = f[chrom]
for snp in dset:
if snp not in keepset:
del dset[snp]
def locally_unfiltered(self, chrom):
present = set()
def _counter(name, node):
if 'local_filter' not in node.attrs:
present.add(name)
with h5py.File(self.store_name, 'r') as f:
dset = f[chrom]
dset.visititems(_counter)
return present
def AF_filter(self, threshold, chrom_dset):
return [i for i in chrom_dset if chrom_dset[i].attrs['AF'
]>=threshold and chrom_dset[i].attrs['AF'] <= 1-threshold]
def snps_present(self, chrom_dset):
return [i for i in chrom_dset]
def pruning(self, threshold, Af_threshold, win_sz, step_sz=None):
"""Threshold is for rsquared and win_sz is in number of snps"""
def pruner(dset, threshold, window):
to_delete = set()
for i, snp in enumerate(window):
if snp in to_delete:
continue
else:
snpi = dset[str(snp)].value
for j in range(i+1, len(window)):
if window[j] in to_delete:
continue
elif np.cov(snpi, dset[str(window[j])].value)[0,1]**2 > threshold: # use only with normalzied data
to_delete.add(window[j])
return to_delete
if step_sz == None:
step_sz = int(win_sz/4)
with h5py.File(self.store_name, 'a') as readfp:
for chrom in readfp.keys():
if chrom == 'meta':
continue
logging.info('--Pruning chrom: ' + chrom)
dset = readfp[chrom]
#snps = np.sort(np.array(dset.keys()).astype(int))
snps = np.sort(np.array(self.AF_filter(Af_threshold, dset))).astype(int)
win_sz = min(snps.shape[0], win_sz)
finished, winstart, winend = False, 0, win_sz
highLD = set()
while not finished:
winend = winstart + win_sz
if winend >= len(snps) - 1:
finished = True
winend = len(snps) - 1
window = snps[winstart:winend]
window = np.sort(np.array(list(set(window) - highLD)))
to_delete = pruner(dset, threshold, window)
highLD = highLD.union(to_delete)
winstart += step_sz
toKeep = set(snps) - highLD
logging.debug("----Keeping {} snps after AF/LD pruning".format(len(toKeep)))
for snp in toKeep:
dset[str(snp)].attrs['prune_selected'] = True
def local_pca(self, n_components=None, chroms=None):
with h5py.File(self.store_name, 'r') as store:
if chroms is None:
chroms = [group for group in store if group != 'meta']
chroms = sorted(chroms, key=lambda x: int(x))
to_PCA = []
for chrom in chroms:
dset = store[chrom]
all_snps = sorted(dset.keys(), key=lambda x:int(x))
for snp in all_snps:
if 'local_filter' not in dset[snp].attrs or not dset[snp].attrs['local_filter']:
val = (dset[snp].value.astype(np.float32) - 2*dset[snp].attrs['local_AF'])/dset[snp].attrs['local_sd']
val[np.isnan(val)] = 0
to_PCA += [list(val)]
to_PCA = np.array(to_PCA).T
#to_PCA = 1.0/self.n * to_PCA.T
#pca = PCA(n_components=n_components)
#pca.fit(to_PCA)
N = to_PCA.shape[0]
logging.info("-pca size is {}".format(to_PCA.shape))
u, sigma, vt = randomized_svd(to_PCA, n_components, transpose=False)
u,vt = svd_flip(u, vt, u_based_decision=False)
with h5py.File(self.store_name) as store:
dset = store['meta']
pca_u = dset.require_dataset('pca_u_local', shape=u.shape, dtype=np.float32)
pca_u[:,:] = u
pca_sigma = dset.require_dataset('pca_sigma_local', shape=sigma.shape, dtype=np.float32)
pca_sigma[:] = sigma
pca_v = dset.require_dataset('pca_vt_local', shape=vt.shape, dtype=np.float32)
pca_v[:] = vt
def local_regression(self, numPCs, chrom):
snps = sorted(self.dataset_keys(chrom), key=lambda x:int(x))
model = LogisticRegression(fit_intercept=False, C=1e5)
X = np.empty((self.n, numPCs+1))
betas = np.empty((len(snps), 1))
pvals_local = np.empty_like(betas)
standard_error = np.empty_like(betas)
V = np.matrix(np.zeros(shape = (X.shape[0], X.shape[0])))
with h5py.File(self.store_name, 'r') as store:
X[:,1:] = store['meta/pca_u_local'].value[:, :numPCs]
X[:,1:] /= np.std(X[:,1:], axis=0)
Y = store['meta/Status']
dset = store[chrom]
# Unfortunately, everything is normalized, so we need to undo that
for i, snp_id in enumerate(snps):
snp = dset[snp_id]
local_sd = snp.attrs['local_sd']
if local_sd == 0.0:
pvals_local[i,0] = np.nan
standard_error[i,0] = np.nan
betas[i,0] = np.nan
else:
snpv = snp.value
#Normalize with local values
snpv -= 2*snp.attrs['local_AF']
snpv /= local_sd
snpv[np.isnan(snpv)] = 0
X[:,0] = snpv
model.fit(X, Y)
beta = model.coef_
betas[i, 0] = beta[0,0]
# generate local pvalues
expVal = np.exp(X.dot(beta.T))
ymodel = expVal/(1+expVal)
np.fill_diagonal(V, np.multiply(ymodel, 1-ymodel))
F = X.T * V * X
z = (beta/sqrt(np.diag(inverse(F))).reshape(1,numPCs+1))
z *= z
pvals_local[i,0] = chi2sf(z,1)[0,0]
standard_error[i,0] = sqrt(np.diag(inverse(F))).reshape(1, numPCs+1)[0,0]
return betas, standard_error, pvals_local
def compute_local_AF(self):
def __compute_AF(name, node):
if isinstance(node, h5py.Dataset) and node.parent.name != '/meta':
vals = node.value
vals[vals == 3] = np.nan
node[...] = vals
node.attrs['local_AF'] = np.nanmean(node) / 2.
node.attrs['n'] = node.len()
node.attrs['local_sd'] = np.nanstd(node)
if self.center is None:
node.attrs['AF'] = node.attrs['local_AF']
node.attrs['sd'] = node.attrs['local_sd']
logging.info("-Computing local allele frequencies")
if self.has_local_AF:
logging.info("--Allele frequencies have already been computed")
return
with h5py.File(self.store_name, 'a') as f:
f.visititems(__compute_AF)
self.has_local_AF = True
f.attrs['has_local_AF'] = True
def impute(self):
"""Use after centering the data. This simply replaces Nan's with 0"""
def _imputer(name, node):
if isinstance(node, h5py.Dataset) and node.parent.name != '/meta':
vals = node.value
AF = node.attrs['AF']
vals[np.isnan(vals)] = 0 #(np.round(2*AF) - AF) / node.attrs['sd']
node[...] = vals
with h5py.File(self.store_name, 'a') as f:
f.visititems(_imputer)
# define a class that inherits from above for the group that has centers
class Decentralized_DO(DO):
"""Data owner that can aid in computation of aggregate statistics"""
def group_keys(self):
with h5py.File(self.store_name, 'r') as f:
return f.keys()
def dataset_keys(self, grp):
with h5py.File(self.store_name, 'r') as f:
dset = f[grp]
return dset.keys()
def report_local_AF(self,chrom):
AF_dic = {}
def _report_AF(name, node):
AF_dic[name] = node.attrs['local_AF'], node.attrs['local_sd'], self.n - node.attrs['local_missing']
with h5py.File(self.store_name, 'r') as f:
dset = f[chrom]
dset.visititems(_report_AF)
return AF_dic
def report_SD(self, chrom):
SD_dic = {}
def _report_SD(name, node):
vals = node.value - 2 * node.attrs['AF']
node[...] = vals
SD_dic[name] = np.sqrt(np.nansum(node.value**2)), np.sum(~np.isnan(node.value))
with h5py.File(self.store_name, 'a') as f:
dset = f[chrom]
dset.visititems(_report_SD)
return SD_dic
def normalize(self, chrom):
def _normalizer(name, node):
val = node.value/node.attrs['sd']
node[...] = val
with h5py.File(self.store_name, 'a') as f:
dset = f[chrom]
dset.visititems(_normalizer)
def report_local_missing_rates(self, chrom):
MR_dic = {}
def _report_missing_rate(name, node):
if 'local_missing' not in node.attrs:
print(name)
MR_dic[name] = node.attrs['local_missing']
with h5py.File(self.store_name, 'r') as f:
dset = f[chrom]
dset.visititems(_report_missing_rate)
return MR_dic
def report_local_counts(self, chrom):
HWE_dic = {}
def _report_local_counts(name, node):
v = node.value
HWE_dic[name] = (np.sum(v==0), np.sum(v==1), np.sum(v==2))
with h5py.File(self.store_name, 'r') as f:
dset = f[chrom]
dset.visititems(_report_local_counts)
return HWE_dic
def report_local_std_global_mean(self, chrom):
std_dic = {}
def _report_std(name, node):
std_dic[name] = np.sqrt(np.mean((node.value - 2*node.attrs['AF'])**2))
with h5py.File(self.store_name, 'r') as f:
dset = f[chrom]
dset.visititems(_report_std)
return std_dic
def set_local_AF(self, chrom, AF, pos):
with h5py.File(self.store_name, 'a') as f:
dset = f[chrom]
if pos == 0:
for key, value in AF.iteritems():
dset[key].attrs['AF'] = value[0] / value[2]
if pos == 1:
for key, value in AF.iteritems():
if key in dset:
dset[key].attrs['sd'] = np.sqrt(value[0]/value[1])
def MAF_filter(self, chrom, rate):
with h5py.File(self.store_name, 'a') as f:
dset = f[chrom]
for key in dset.keys():
af = dset[key].attrs['AF']
if af + kLargeEpsilon < rate or af - kLargeEpsilon > (1-rate):
del dset[key]
def HWE_filter(self, chrom, dic, rate):
with h5py.File(self.store_name, 'a') as f:
dset = f[chrom]
for key, value in dic.iteritems():
if value < rate:
del dset[key]
else:
dset[key].attrs['hwe'] = value
def set_missing_rate_filter(self, chrom, MR, rate):
with h5py.File(self.store_name, 'a') as f:
dset = f[chrom]
for key, value in MR.iteritems():
if value > rate:
del dset[key]
else:
dset[key].attrs['missing_rate'] = value
def give_cov(self, chroms, snps_list, cov=True):
n = np.sum([len(item) for item in snps_list])
with h5py.File(self.store_name, 'r') as f:
arr = np.zeros((n, self.n))
j = 0
for i, chrom in enumerate(chroms):
snps = snps_list[i]
dset = f[chrom]
for k in range(len(snps)):
arr[j+k,:] = dset[str(snps[k])].value
#arr[j:j+len(snps),:] = np.array([dset[str(item)] for item in snps])
j += len(snps)
if cov:
return np.cov(arr)
else:
arr = arr.astype(np.float16)
return arr.dot(arr.T)
def corr_data(self, chroms, snps_list):
n = np.sum(len(item) for item in snps_list)
with h5py.File(self.store_name, 'r') as f:
arr = np.zeros((self.n,n), dtype=np.float32)
j = 0
for i, chrom in enumerate(chroms):
snps = snps_list[i]
dset = f[chrom]
for k in range(len(snps)):
arr[:, j+k] = dset[str(snps[k])].value
j += len(snps)
corrmat = nancorr(arr)
return(corrmat)
def get_MAF(self, chrom, window, global_freq=True):
with h5py.File(self.store_name, 'r') as f:
dset = f[chrom]
vals = np.empty(len(window))
for i, snp in enumerate(window):
if global_freq:
af = dset[str(snp)].attrs['AF']
else:
af = dset[str(snp)].attrs['local_AF']
vals[i] = af if af > 0.5 else 1-af
return vals
def give_cov_pca(self, chroms, n, curr_mat, weight, mult=5000): # a hack to deal with memory inefficiencies
#n = np.sum([len(item) for item in snps_list])
mkl.set_num_threads(2)
with h5py.File(self.store_name, 'r') as f:
arr = np.zeros((n, self.n))
j = 0
for i, chrom in enumerate(chroms):
dset = f[chrom]
keyz = sorted([int(i) for i in dset.keys()])
for k,key in enumerate(keyz):
snp = dset[str(key)]
value = snp.value
#AF = snp.attrs['AF']
#value -= 2*AF
value[np.isnan(value)] = 0#(np.round(2*AF) - 2*AF)
#value /= dset[str(key)].attrs['sd']
arr[j+k,:] = value
j += len(keyz)
arr = arr.astype(np.float32)
arr /= np.sqrt(weight)
blocks = arr.shape[0]/mult
for i in range(blocks):
curr_mat[i*mult:(i+1)*mult,:] += arr[i*mult:(i+1)*mult,:].dot(arr.T)
curr_mat[blocks*mult:,:] += arr[blocks*mult:,:].dot(arr.T)
def give_data(self,chroms, n):
"""Should only be used to compute PCA locally for comparison's sake."""
arr = np.empty((self.n, n))
with h5py.File(self.store_name, 'r') as f:
j = 0
for i, chrom in enumerate(chroms):
dset = f[chrom]
keyz = sorted([int(i) for i in dset.keys()])
for k, key in enumerate(keyz):
value = dset[str(keyz[k])].value
AF = dset[str(keyz[k])].attrs['AF']
value[np.isnan(value)] = (np.round(2*AF) - 2*AF) / dset[str(keyz[k])].attrs['sd']
arr[:, j+k] = value
j += len(keyz)
return arr
def give_snp_data(self, chrom, location, npcs):
X = np.empty((self.n, npcs+1))
with h5py.File(self.store_name, 'r') as f:
dset = f[chrom]
X[:,0] = dset[str(location)].value
y = np.sign(f["meta/Status"].value - 0.5).reshape(self.n, 1)
X[:,1:] = f["meta/pca_u"].value[:, :npcs] * 1/0.10485152
return X, y
def give_moments(self, addresses):
first_mom, second_mom = [], []
with h5py.File(self.store_name, 'r') as f:
for address in addresses:
vals = f[address].value
first_mom.append(np.mean(vals, axis=0))
second_mom.append(np.mean(vals ** 2, axis=0))
return first_mom, second_mom
def snp_loader(self, stds, npcs, covp, pos):
""" Load the snp. Particularly useful if there are iterations"""
with h5py.File(self.store_name, 'r') as f:
if self.current_Y is None:
X = np.empty((self.n, covp))
self.current_Y = np.sign(f["meta/Status"].value - 0.5).reshape(self.n, 1)
X[:,-npcs:] = f["meta/pca_u"].value[:, :npcs] * 1/stds
self.current_X = X * -self.current_Y
self.X = X
i = 0
for chrom, loc in pos:
snp = f[chrom + "/" + loc]
# If nobody has any variation, don't bother
if snp.attrs['sd'] == 0:
raise ValueError()
val = snp.value
val[np.isnan(val)] = 0
self.X[:,i] = val
i += 1
self.current_X[:, :i] = self.X[:, :i] * -self.current_Y
self.load_snp = False
def run_regression(self, pos, npcs, beta, stds, logistic, covp):
if self.load_snp:
self.snp_loader(stds, npcs, covp, pos)
model = LogisticRegression(fit_intercept=False, C=1e5, warm_start=beta)
model.fit(self.X, self.current_Y)
return model.coef_
def admm_update(self, pos, npcs, u, beta, rho, z0, stds,logistic, covp):
"""Runs a regularized logistic regression with a penalty that draws the answer
closer to beta"""
# If temp values are not set, set them up
if self.load_snp:
self.snp_loader(stds, npcs, covp, pos)
return bfgs_more_gutted(self.current_X, u, beta, rho, z0, covp)
# if logistic:
# #x,v,d = bfgs_update(self.current_X, u, beta, rho, z0)
# #x = bfgs_gutted(self.current_X, u, beta, rho, z0)
# x = bfgs_more_gutted(self.current_X, u, beta, rho, z0, n)
# return x
# else:
# pass
# return x
def covLogit(self, pos, beta, stds, logistic, last=True):
"""returns the variance covariance matrix for thelogistic regression
with the provided parameters. Used for Wald pvalues"""
if self.load_snp:
pcov = len(beta)
npcs = pcov - len(pos)
self.X = np.empty((self.n, pcov))
with h5py.File(self.store_name, 'r') as f:
i = 0
for chrom, loc in pos:
self.current_X[:, i] = f[chrom+"/"+loc].value
i += 1
self.X[:, i:] = f["meta/pca_u"].value[:, :npcs] * 1/stds
# if logistic:
X = self.X
expVal = np.exp(X.dot(beta))
ymodel = expVal/(1+expVal)
V = np.matrix(np.zeros(shape = (X.shape[0], X.shape[0])))
np.fill_diagonal(V, np.multiply(ymodel, 1-ymodel))
F = X.T * V * X
# will move on so clear the load_snp flag
if last:
self.load_snp = True
return F
def update_pheno(self, phenodict):
with h5py.File(self.store_name, 'a') as f:
dset = f['meta']
ids = dset['id'].value
phenos = [phenodict[i] for i in ids]
dset['Status'][...] = phenos
def copy_pca(self, other, local):
if not local:
pca_u = 'pca_u'
pca_sigma = 'pca_sigma'
pca_vt = 'pca_v.T'
else:
pca_u = 'pca_u_local'
pca_sigma = 'pca_sigma_local'
pca_vt = 'pca_vt_local'
with h5py.File(self.store_name, 'a') as thisf:
with h5py.File(other, 'r') as otherf:
thismeta = thisf['meta']
othermeta = otherf['meta']
if pca_u in thismeta:
del thismeta[pca_u]
del thismeta[pca_sigma]
del thismeta[pca_vt]
pca_u_value = othermeta[pca_u].value
us = thismeta.require_dataset(pca_u, shape=pca_u_value.shape, dtype=np.float32)
us[:] = pca_u_value
del pca_u_value
pca_sigmas = othermeta[pca_sigma].value
ss = thismeta.require_dataset(pca_sigma, shape=pca_sigmas.shape, dtype=np.float32)
ss[:] = pca_sigmas
del pca_sigmas
pca_vT = othermeta[pca_vt].value
vs = thismeta.require_dataset(pca_vt, shape=pca_vT.shape, dtype=np.float32)
vs[:] = pca_vT
del pca_vT
def record_centralized_pca(self, sigma, Us):
with h5py.File(self.store_name, 'a') as f:
dset = f['meta']
if 'Centralized_PCA_sigma' in dset:
del dset['Centralized_PCA_sigma']
del dset['PCA_Us_Centralized']
first = dset.require_dataset('Centralized_PCA_sigma', shape=sigma.shape, dtype=np.float32)
first[:] = sigma
pca_components = dset.require_dataset('PCA_Us_Centralized', shape = Us.shape, dtype=np.float32)
pca_components[:] = Us
def AF_filter(self, threshold, chrom):
with h5py.File(self.store_name, 'r') as f:
dset = f[chrom]
return super(Decentralized_DO, self).AF_filter(threshold, dset)
def snps_present(self, chrom):
with h5py.File(self.store_name, 'r') as f:
dset = f[chrom]
return super(Decentralized_DO, self).snps_present(dset)
def tag_snps(self, chrom, keys, attr_tag, value):
with h5py.File(self.store_name, 'a') as f:
dset = f[chrom]
for key in keys:
dset[str(key)].attrs[attr_tag] = value
def delete_snps(self, chrom, keys):
with h5py.File(self.store_name, 'a') as f:
dset = f[chrom]
for key in keys:
del dset[str(key)]
def passed_LD(self, chrom):
indicies = []
def was_selected(name, node):
if 'prune_selected' in node.attrs:
indicies.append(name)
with h5py.File(self.store_name, 'r') as f:
dset = f[chrom]
dset.visititems(was_selected)
return sorted(indicies, key=lambda x: int(x))
def store_eigs(self, sigma, v, chroms):
"""Computes U's given the centralized sigma and V. Stores all the variables"""
with h5py.File(self.store_name, 'a') as store:
dset = store['meta']
pca_sigma = dset.require_dataset('pca_sigma', shape=sigma.shape,
dtype = np.float16)
sigma = np.sqrt(sigma)
pca_sigma[:] = sigma
inv_sig = sigma.copy()
inv_sig[inv_sig > 0] = 1.0/inv_sig[inv_sig > 0]
# this part can be done for small groups at a time to save memory
n = self.count()#np.sum([len(item) for item in snps_list])
arr = np.zeros((self.n, n))
j = 0
for i, chrom in enumerate(chroms):
dset = store[chrom]
snps = sorted([int(i) for i in dset.keys()])
for k, key in enumerate(snps):
val = dset[str(key)].value
# It is already normalized and centered
# AF = dset[str(key)].attrs['AF']
# val -= 2*AF
val[np.isnan(val)] = 0#(np.round(2*AF) - 2*AF) #/ dset[str(snps[k])].attrs['sd']
arr[:, j+k] = val.T
#arr[:, j:j+len(snps)] = np.array([dset[str(item)] for item in snps]).T
j += len(snps)
u = arr.dot(v.T).dot(np.diag(inv_sig))
u, v = svd_flip(u, v, u_based_decision=False)
dset = store['meta']
pca_vt = dset.require_dataset('pca_v.T', shape=v.shape, dtype=np.float32)
pca_vt[:,:] = v
pca_u = dset.require_dataset('pca_u', shape=u.shape, dtype=np.float32)
pca_u[:,:] = u
def set_normalized(self, value):
|
def compute_local_missing_rates(self):
def __compute_missing_rate(name, node):
if isinstance(node, h5py.Dataset) and node.parent.name != '/meta':
node.attrs['local_missing'] = np.sum(node.value==3)
logging.info("-Computing local missing rates")
with h5py.File(self.store_name, 'a') as f:
f.visititems(__compute_missing_rate)
class Center(object):
"""The central hub that drives and requires particular computations from each node."""
def __init__(self, store_names, n_cores=1):
self.store_names = store_names
self.nDOs = len(store_names)
self.ncores = n_cores
self.DOs = [Decentralized_DO(s_name, self) for s_name in self.store_names]
self.keys = self.DOs[0].group_keys()
self.n = sum([item.n for item in self.DOs])
logging.info("- Setup center with {} DOs and {} individuals". format(
self.nDOs, self.n))
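# NOTE: the filters below share the same hub-and-spoke pattern: every data owner
# (DO) reports local sufficient statistics for a chromosome, the Center merges
# them into a consensus dictionary, and the consensus values are pushed back to
# each DO, which applies the threshold locally. Only summary statistics, never
# raw genotypes, are exchanged in these steps.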
def loci_missing_rate_filter(self, rate):
for DO in self.DOs:
DO.compute_local_missing_rates()
for chrom in self.keys:
if chrom != 'meta':
logging.info("Consensus missing rate computation on chrom: {}".format(chrom))
MR = add_dict()
MR.set_key_values(self.DOs[0].dataset_keys(chrom), 0)
for DO in self.DOs:
update_dic = DO.report_local_missing_rates(chrom)
MR.update(update_dic, 1.0)
for DO in self.DOs:
DO.set_missing_rate_filter(chrom, MR, rate * self.n)
def MAF_filter(self, rate):
"""Computes local and consensus AF, sd.
Removes loci below the specified MAF"""
def AF_wrapper(DO):
DO.compute_local_AF()
#with Pool(self.ncores) as pool:
#pool.map(AF_wrapper , self.DOs)
for DO in self.DOs:
AF_wrapper(DO)
for chrom in self.keys:
if chrom != 'meta':
logging.info("---Consensus AF computation on chrom: {}".format(chrom))
AF = add_dict()
AF.set_key_values(self.DOs[0].dataset_keys(chrom),[0,0,0])
for DO in self.DOs:
update_dic = DO.report_local_AF(chrom)
AF.update(update_dic, 1.0, 0)
# update the overall AF
for DO in self.DOs:
DO.set_local_AF(chrom, AF, 0)
if rate is not None:
DO.MAF_filter(chrom, rate)
def compute_std(self, chrom):
if chrom != 'meta':
logging.info("--consensus SD computation on chrom: {}".format(chrom))
SD = add_dict()
SD.set_key_values(self.DOs[0].dataset_keys(chrom), [0,0])
for DO in self.DOs:
update_dic = DO.report_SD(chrom)
SD.update(update_dic, 1.0, 1) #TODO: AF, SD and HWE should all be computed by passing counts as the sufficient statistics; clean this up later
for DO in self.DOs:
DO.set_local_AF(chrom, SD, 1)
def normalize(self):
for chrom in self.keys:
if chrom != 'meta':
logging.info("--normalizing chrom: {}".format(chrom))
self.compute_std(chrom)
for DO in self.DOs:
DO.normalize(chrom)
def HWE_filter(self, rate):
for chrom in self.keys:
if chrom != 'meta':
logging.info("-HWE computation on chrom: {}".format(chrom))
HWE = add_dict()
HWE.set_key_values(self.DOs[0].dataset_keys(chrom),np.array([0,0,0]))
for DO in self.DOs:
update_dic = DO.report_local_counts(chrom)
HWE.update(update_dic, 1.0)
for key, value in HWE.iteritems():
hwe = HweP(int(value[1]), int(value[0]), int(value[2]), 0 )
HWE[key] = hwe
for DO in self.DOs:
DO.HWE_filter(chrom, HWE, rate)
def HWE_test(self, homor, het, homoa):
"""HWE test (midpoint test). Other versions of HWE filter can be impelemented with the same information.
This implementation should match PLINK1.9's implementation."""
homc = max(homor, homoa)
homr = min(homor, homoa)
rare = 2 * homr + het
# mid point of the distribution
n = (homor + het + homoa) * 2
tail_p = (1 - kSmallEpsilon) * kExactTestBias
centerp = 0
lastp2, lastp1 = tail_p, tail_p
#if (obs_hets * genotypes2 > rare_copies * (genotypes2 - rare_copies)):
mid = int(rare * (2 * n -rare) / (2 * n))
if (mid % 2 != rare % 2):
mid += 1
probs = np.zeros(1 + rare)
probs[mid] = 1.0
tsum = 1.0
curr_hets = mid
curr_homr = (rare - mid) / 2
curr_homc = n - curr_hets - curr_homr
while (curr_hets >= 2):
probs[curr_hets - 2] = probs[curr_hets ] * (curr_hets) * (curr_hets - 1.0) / (4.0 * (curr_homr - 1.0) * (curr_homc + 1.0))
tsum += probs[curr_hets - 2]
curr_hets -= 2
curr_homr += 1
curr_homc += 1
curr_hets = mid
curr_homr = (rare - mid) / 2
curr_homc = n - curr_hets - curr_homr
while (curr_hets <= rare -2):
probs[curr_hets + 2] = probs[curr_hets] * 4.0 * curr_homr * curr_homc / ((curr_hets + 2.0) * (curr_hets + 1.0))
tsum += probs[curr_hets + 2]
curr_hets += 2
curr_homr -= 1
curr_homc -= 1
# target = probs[het]
# return min(1.0, np.sum(probs[probs <= target])/tsum)
probs /= tsum
p_hi = float(probs[het])
for i in xrange(het + 1, rare + 1):
p_hi += probs[i]
#
p_lo = float(probs[het])
for i in xrange(het-1, -1, -1):
p_lo += probs[i]
p_hi_lo = 2.0 * p_hi if p_hi < p_lo else 2.0 * p_lo
p_hwe = 0.0
for i in xrange(0, rare + 1):
if probs[i] > probs[het]:
continue
p_hwe += probs[i]
p_hwe = 1.0 if p_hwe > 1.0 else p_hwe
return p_hwe
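# NOTE: HWE_test is an exact (mid-p style) Hardy-Weinberg test on the three
# genotype counts. Conditional on the allele counts it enumerates the probability
# of every possible heterozygote count and sums the probabilities that do not
# exceed that of the observed count, so counts far from the 2*p*q*N expectation
# give small p-values and counts near expectation give p-values near 1.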
def correct_LD_prune(self, threshold, win_sz, step_sz=None):
#TODO use local_LD_filter
def pruner(chrom, threshold, window):
window.shape = (1, window.shape[0])
to_delete = set()
n = window.shape[1]
sumLinT = np.zeros((n,n), dtype = np.float32)
sumSqT = np.zeros((n,n), dtype = np.float32)
crossT = np.zeros((n,n), dtype = np.float32)
for DO in self.DOs:
sumLin, sumSq, cross = DO.corr_data([chrom], window)
sumLinT += sumLin
sumSqT += sumSq
crossT += cross
MAF = DO.get_MAF(chrom, window[0], global_freq=True)
corrT = corr(sumLinT, sumSqT, crossT)
while (1):
for i, snp1 in enumerate(window[0,:]):
if snp1 in to_delete:
continue
else:
for j in range(i+1, n):
if window[0][j] in to_delete:
continue
elif corrT[i,j]**2 > threshold:
if MAF[i] > MAF[j] * (1.0 + kLargeEpsilon): #somewhat similar to what plink does
#ai = sumLin[i,j] / cross[i, j]
#aj = sumLin[j,i] / cross[i, j]
#majori = ai if ai > .5 else 1 - ai
#majorj = aj if aj > .5 else 1 - aj
#if ai > aj * (1 + kSmallEpsilon):
to_delete.add(snp1)
else:
to_delete.add(window[0][j])
break
remaining = np.array([i for i,snp in enumerate(window[0]) if snp not in to_delete])
r2 = corrT[remaining,:][:,remaining]
if np.max(r2**2) < threshold:
break
return to_delete
if step_sz is None:
step_sz = int(win_sz/2)
for chrom in self.keys:
if chrom == 'meta':
continue
logging.debug("---Decentralized LD pruning on chrom: {}".format(chrom))
# Get snps that pass the allele frequency threshold
snps = np.sort(np.array(self.DOs[0].snps_present(chrom)).astype(int))
win_sz = min(snps.shape[0], win_sz)
finished, winstart = False, 0
highLD, to_delete = set(), set()
while not finished:
winend = winstart + win_sz
if winend >= len(snps):
finished = True
winend = len(snps)
window = snps[winstart:winend] #preliminary window
window = np.sort(np.array(list(set(window) - to_delete)))#[:win_sz]
to_delete = pruner(chrom, threshold, window)
highLD = highLD.union(to_delete)
winstart += step_sz# + offset[0][0]
#toKeep = set(snps) - highLD
logging.info("---Keeping {} snps after AF/LD pruning".format(len(snps) - len(highLD)))
for DO in self.DOs:
DO.delete_snps(chrom, highLD)
def LD_prune(self,threshold, AF_threshold, win_sz, step_sz=None):
"""Flag snps that have small LD"""
def pruner(chrom, threshold, window):
window.shape = (1, window.shape[0])
to_delete = set()
n = window.shape[1]
cov = np.zeros((n,n))
# considerable optimization can be done so that only the parts
# that are previously not communicated get communicated
for DO in self.DOs:
cov += float(DO.n)/float(self.n) * DO.give_cov([chrom], window)
#cov /= self.nDOs
# with the covariance matrix we could be more accurate than the
# simple greedy approach we implemented in the centralized version,
# but we keep the same algorithm for comparison's sake
for i, snp in enumerate(window[0,:]):
if snp in to_delete:
continue
else:
for j in range(i+1, window.shape[1]):
if window[0,j] in to_delete:
continue
elif cov[i,j]**2 > threshold:
to_delete.add(window[0,j])
return to_delete
if step_sz == None:
step_sz = int(win_sz/2)
for chrom in self.keys:
if chrom == 'meta':
continue
logging.info("---Decentralized LD pruning on chrom: {}".format(chrom))
# Get snps that pass the allele frequency threshold
snps = np.sort(np.array(self.DOs[0].AF_filter(AF_threshold, chrom))).astype(int)
win_sz = min(snps.shape[0], win_sz)
finished, winstart = False, 0
highLD = set()
i = 0
while not finished:
winend = winstart + win_sz
if winend >= len(snps) - 1:
finished = True
winend = len(snps) - 1
window = snps[winstart:winend]
window = np.sort(np.array(list(set(window) - highLD)))
to_delete = pruner(chrom, threshold, window)
highLD = highLD.union(to_delete)
winstart += step_sz
if winstart / 5000 > i:
logging.debug("pruning at {}".format(winstart))
i += 1
toKeep = set(snps) - highLD
logging.info("----Keeping {} snps after AF/LD pruning".format(len(toKeep)))
for DO in self.DOs:
DO.tag_snps(chrom, toKeep, 'prune_selected', True)
def PCA(self, n_components=None, chroms=None):
if chroms is None or chroms == []:
chroms = [item for item in self.keys if item != 'meta']
chroms = sorted(chroms, key=lambda x: int(x))
DO = self.DOs[0]
n = DO.count(list(set(self.keys) - set(chroms)))
to_PCA = np.zeros((n, n), dtype=np.float32)
logging.info("Preparing covariance matrix of size {}".format(n))
for DO in self.DOs:
DO.give_cov_pca(chroms, n, to_PCA, 1.0)# float(DO.n)/float(DO.n-1))
if n_components is not None:
m = min(self.n, n)
m = min(m, n_components)
#n_components = (n - n_components, n-1)
#sigma, v = eig(to_PCA, overwrite_a=True, eigvals=n_components)# for linalg.eigh slow
logging.info("Running PCA")
sigma, v = eig(to_PCA, k=n_components, ncv=3*n_components)
logging.info("Done running PCA")
# there should be no eigenvalue with a negative value. If there is, it should
# be tiny and due to numerical errors
del to_PCA
sigma, v = zip(*sorted(zip(sigma, v.T),reverse=True))
v = np.array(v)
sigma = np.array(sigma)
sigma[sigma < 0] = 0
for DO in self.DOs:
DO.store_eigs(sigma, v, chroms)
#pca = PCA(n_components=n_components)
#for now ignore the n_components arg
#pca.fit(to_PCA)
def change_pheno(self, pheno_plink):
pheno_file = plinkfile.open(pheno_plink)
sample_list = pheno_file.get_samples()
iid = [item.iid for item in sample_list]
status = [item.affection for item in sample_list]
status_dict = dict((key, value) for (key, value) in zip(iid, status))
for DO in self.DOs:
DO.update_pheno(status_dict)
def copy_pca(self, other, local=False):
for DO in self.DOs:
base = os.path.basename(DO.store_name)
file_name = os.path.join(other, base)
DO.copy_pca(file_name, local)
def run_regression(self, numPCs, n_iters, warm_start=True, chroms=[], sites=None, kind='ADMM',
verbose=False, out_file="d_beta.txt"):
def _regression(kind, verbose, **kwargs):
"""Dispatches to regression algorithm"""
if kind == 'ADMM':
if verbose:
return self._ADMM_verbose(**kwargs)
else:
return self._ADMM(**kwargs)
elif kind == 'AVG':
return self._AVG(**kwargs)
logging.info("-Running regression")
DOs = self.DOs
kwargs = {"rho": 10.0, "max_iters":n_iters, "alpha":1.2,
"npcs":numPCs, "mu":0.0}#self.n * 1e-9}
# Compute the variance of PCs
first_moment = np.zeros((1, numPCs))
second_moment = np.zeros((1, numPCs))
#covp = len(pos) + numPCs
covp = numPCs + 1
for DO in DOs:
DO.load_snp = True
m1, m2 = DO.give_moments(["meta/pca_u"])
first_moment += np.array(m1[0][:numPCs]) * DO.n / float(self.n)
second_moment += np.array(m2[0][:numPCs]) * DO.n / float(self.n)
stds = np.sqrt(second_moment - first_moment**2)
kwargs["stds"] = stds
write_n = 50
if verbose:
write_n = write_n / 10
# Run for each snp
if len(chroms) == 0 :
chroms = self.keys
else:
chroms = [unicode(str(chrom)) for chrom in chroms]
num_g = DOs[0].count(exclude=list(set(self.keys) - set(chroms)))
pbar = tqdm.tqdm(total=num_g)
counter, all_betas, warm_beta = 0, [], np.zeros((covp, 1))
# Run regression with PC's only one time, to get the likelihood for the smaller model
kwargs['pos'] = []
kwargs["beta"] = warm_beta[1:]
pc_beta = _regression(kind, False, **kwargs)
pc_likelihood = 0
warm_beta[1:] = pc_beta
for DO in DOs:
pc_likelihood += DO.likelihood(pc_beta)
DO.load_snp = True
DO.current_Y = None
if not verbose:
pval = np.empty((covp + 2, 1))
else:
pval = np.empty((covp + 2, n_iters+1))
# Run regression for everything else and compute the log likelihood difference/Wald Pvalues
with open(out_file, 'w') as fout:
for chrom in chroms:
if chrom == 'meta':
continue
logging.info("--Running {} on chromosome: {}".format(kind, chrom))
snps = sorted(DOs[0].dataset_keys(chrom), key=lambda x:int(x))
pval[covp+1, :] = chrom
for snp in snps:
kwargs["pos"] = [(chrom, snp)]
kwargs["beta"] = warm_beta
beta = _regression(kind, verbose, **kwargs)
if isnan(beta[0,0]):
pval[:covp+1,:] = np.nan
for DO in DOs:
DO.load_snp = True
else:
likelihood = 0
for DO in DOs:
likelihood += DO.likelihood(beta, verbose)
covLogit = _sum([DO.covLogit([(chrom, snp)], beta, stds, True) for DO in DOs], axis=0)
# get pvalues
covLogit = inverse(covLogit)
z = (beta / sqrt(np.diag(covLogit)).reshape(covp, 1))
z = z * z
pval[:covp,:] = chi2sf(z, 1)
pval[covp,:] = likelihood - pc_likelihood
if not verbose:
all_betas.append( "\t".join(map(str, beta[:,0])) +"\t" + "\t".join(map(str, pval[:,0])))
else:
for ind, line in enumerate(beta.T):
all_betas.append( "\t".join(map(str, line)) +"\t" + "\t".join(map(str, pval[:,ind].tolist() + [ind])))
counter += 1
if counter == write_n:
fout.write('\n'.join(all_betas))
fout.write('\n')
counter = 0
all_betas = []
pbar.update(write_n)
fout.write('\n'.join(all_betas))
def _ADMM(self, pos, npcs, rho, beta, alpha=1., max_iters=10, mu=0.0, stds=1, #1e-9, stds = 1,
logistic=True, verbose=True): # mu is really self.n * mu
"""Performs ADMM regression. So far, only logistic regression is implemented."""
DOs = self.DOs
covp = len(pos) + npcs
K = len(DOs)
z = np.zeros((covp, K))
u = np.zeros((covp, K))
# shrink_param = mu / float(rho * K)
for k in xrange(max_iters):
for i, DO in enumerate(DOs): # can be parallelized
try:
# z update:
z[:,i] = DO.admm_update(pos, npcs,u[:,i, None], beta, rho, z[:,i, None], stds, logistic, covp)
except ValueError:
beta *= np.nan
return beta
# Update betas
z_hat = add(alpha * z, sub(1.0, alpha) * beta)
# meanVal = div(_sum(add(z_hat, u), 1)[:,None], K)
# beta = div(_sum(add(z_hat, u), 1)[:,None], K)
beta = div(umr_sum(z_hat,1)[:,None], K)
# beta = sub(maximum(0, sub(meanVal, shrink_param)), maximum(0, -add(meanVal, shrink_param)))
# Update u:
u += sub(z_hat, beta)
return beta
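# NOTE: in ADMM terms the loop above performs, per data owner i:
#   z_i   <- argmin_z  l_i(z) + (rho/2) * ||z - beta + u_i||^2   (local solve in admm_update)
#   z_hat <- alpha * z + (1 - alpha) * beta                      (over-relaxation)
#   beta  <- mean_i(z_hat_i)   (consensus step; textbook ADMM averages z_hat_i + u_i)
#   u_i   <- u_i + z_hat_i - beta                                (dual update)
# The L1 shrinkage term (mu) is commented out above, so mu is effectively unused here.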
def _ADMM_verbose(self, pos, npcs, rho, beta, alpha=1.0, max_iters=10, mu=1e-9, stds=1,
logistic=True):
"""Same as _ADMM except records the beta after every iteration. _ADMM avoids checking the
condition over and over again. Probably a stupid optimization but w/e"""
DOs = self.DOs
covp = len(pos) + npcs
K = len(DOs)
z = np.zeros((covp, K))
u = np.zeros((covp, K))
shrink_param = mu / float(rho * K)
Betas = np.empty((covp, max_iters+1))
Betas[:,0] = 0
Us = np.empty((1, max_iters+1))
Us[0,0] = 0
for k in xrange(max_iters):
for i, DO in enumerate(DOs): # can be parallelized
try:
# z update:
z[:,i] = DO.admm_update(pos, npcs,u[:,i, None], beta, rho, z[:,i, None], stds, logistic, covp)
except ValueError:
Betas[k+1:, :] = np.nan
return beta
# Update betas
z_hat = add(alpha * z, sub(1.0, alpha) * beta)
#meanVal = div(_sum(add(z_hat, u), 1)[:,None], K)
#beta = sub(maximum(0, sub(meanVal, shrink_param)), maximum(0, -add(meanVal, shrink_param)))
beta = div(umr_sum(add(z_hat, u), 1)[:,None], K)
Betas[:,k+1] = beta[:,0]
# Update u:
u += sub(z_hat, beta)
Us[0,k+1] = np.linalg.norm(u)
return Betas
def _AVG(self, pos, npcs, stds = 1, logistic=True, verbose=True, **kwargs):
"""Performs Average regression. So far, only logistic regression is implemented.
Performs the regression on de-centralized data. This simply averages all the results,
for the actual analysis, we used inverse variance weighted averaging FE model."""
covp = len(pos) + npcs
DOs = self.DOs
N = float(self.n)
beta = np.zeros((covp, 1))
for i, DO in enumerate(DOs): # can be parallelized
# try:
beta += DO.run_regression(pos, npcs, beta, stds, logistic, covp).T * DO.n / N
# except ValueError:
# beta *= np.nan
# return beta
# Update betas
return beta
def PCA_Centralized(self, n_components=None, chroms=None):
from sklearn.decomposition import PCA
if chroms is None or chroms == []:
chroms = [item for item in self.keys if item != 'meta']
chroms = sorted(chroms, key=lambda x: int(x))
DO = self.DOs[0]
n = DO.count(list(set(self.keys) - set(chroms)))
data = np.empty((self.n, n), dtype=np.float32)
logging.info("centralizing data just to run centralized PCA")
start = 0
for DO in self.DOs:
data[start:start+DO.n,:] = DO.give_data(chroms,n)
start += DO.n
pca = PCA()
U, S, V = pca._fit_truncated(data, n_components=n_components, svd_solver = 'arpack')
# u, sigma, vt = randomized_svd(data, n_components, transpose=False)
# u,vt = svd_flip(u, vt, u_based_decision=False)
self.DOs[0].record_centralized_pca(S, U)
logging.info("Done with centralized PCA")
def run_meta_filters(self, t_missing=None, t_AF=None, t_hwe=None, t_LD=None, win_sz=50, global_clean=False):
def count(global_clean):
unfiltered = 0
for chrom in self.keys:
if chrom == 'meta':
continue
present = self.DOs[0].locally_unfiltered(chrom)
for DO in self.DOs[1:]:
present = present.intersection(DO.locally_unfiltered(chrom))
unfiltered += len(present)
if global_clean:
for DO in self.DOs:
DO.clean_by_local_filter(chrom, present)
return(unfiltered)
if t_missing is not None:
logging.info("Starting local missing filter")
for DO in self.DOs:
DO.local_missing_filter(t_missing)
unfiltered = count(global_clean)
logging.info("After missing rate filter {} snps remain".format(unfiltered))
if t_AF is not None:
logging.info("Starting local AF")
for DO in self.DOs:
DO.local_AF_filter(t_AF)
unfiltered = count(global_clean)
logging.info("After AF filter {} snps remain".format(unfiltered))
if t_hwe is not None:
logging.info("Starting HWE filter")
for DO in self.DOs:
DO.local_HWE_filter(t_hwe)
unfiltered = count(global_clean)
logging.info("After HWE filter {} snps remain".format(unfiltered))
if t_LD is not None:
logging.info("Running LD filter")
for DO in self.DOs:
DO.local_LD_filter(t_LD, win_sz) #implement
unfiltered = count(global_clean)
logging.info("After LD filter {} snps remain".format(unfiltered))
def run_local_pca(self, n_components=10, chroms=None):
for DO in self.DOs:
DO.local_pca(n_components, chroms)
def run_meta_regression(self, numPCs, out_file):
logging.info("Starting meta regression...")
chroms = self.keys
with open(out_file, 'a') as fout:
for chrom in chroms:
if chrom == 'meta':
continue
logging.info("Moving on to chrom " + chrom)
for i, DO in enumerate(self.DOs):
betas, standard_errors, pvals = DO.local_regression(numPCs, chrom)
if not i: # first DO
to_write = np.empty((len(betas), 3*len(self.DOs)+1))
to_write[:,i] = betas[:,0]
to_write[:,i+len(self.DOs)] = standard_errors[:,0]
to_write[:,i+2*len(self.DOs)] = pvals[:,0]
to_write[:,3*len(self.DOs)] = chrom
np.savetxt(fout, to_write)
logging.info("Finished Meta-regressions")
def impute (self):
for DO in self.DOs:
DO.impute()
logging.info("DUUUUDE")
class add_dict(dict):
def set_key_values(self, keys=None, value=None):
if keys is None:
keys = self.keys()
if value is None:
value = 0
for key in keys:
self[key] = value
def update(self, other, frac=1.0, pos=None):
if pos is None:
k1 = other.keys()[0]
if isinstance(other[k1], int):
for key, value in other.iteritems():
dicVal = self[key]
self[key] = dicVal + frac * value
else:# it is an array
for key, value in other.iteritems():
dicVal = self[key]
self[key] = [x + frac * y for x,y in zip(dicVal, value)]
elif pos == 0: #deal with these later TODO they can be put in the framework above
for key, value in other.iteritems():
dicVal = self[key]
self[key] = dicVal[0] + value[2] * value[0], dicVal[1], dicVal[2] + value[2]
elif pos == 1:
for key, value in other.iteritems():
dicVal = self[key]
self[key] = dicVal[0] + value[0]**2, dicVal[1] + value[1]
if __name__=='__main__':
print "no commands here yet. Test using WTCCC_run.py"
| with h5py.File(self.store_name, 'a') as store:
store.attrs['normalized'] = value |
gestion.module.ts | import { Module } from '@nestjs/common'; |
@Module({})
export class GestionModule {} | |
tx_dialog.py | from kivy.app import App
from kivy.factory import Factory
from kivy.properties import ObjectProperty
from kivy.lang import Builder
from kivy.clock import Clock
from kivy.uix.label import Label
from electrum_gui.kivy.i18n import _
from datetime import datetime
from electrum.util import InvalidPassword
Builder.load_string('''
<TxDialog>
id: popup
title: _('Transaction')
is_mine: True
can_sign: False
can_broadcast: False
can_rbf: False
fee_str: ''
date_str: ''
amount_str: ''
tx_hash: ''
status_str: ''
description: ''
outputs_str: ''
BoxLayout:
orientation: 'vertical'
ScrollView:
GridLayout:
height: self.minimum_height
size_hint_y: None
cols: 1
spacing: '10dp'
padding: '10dp'
GridLayout:
height: self.minimum_height
size_hint_y: None
cols: 1
spacing: '10dp'
BoxLabel:
text: _('Status')
value: root.status_str
BoxLabel:
text: _('Description') if root.description else ''
value: root.description
BoxLabel:
text: _('Date') if root.date_str else ''
value: root.date_str
BoxLabel:
text: _('Amount sent') if root.is_mine else _('Amount received')
value: root.amount_str
BoxLabel:
text: _('Transaction fee') if root.fee_str else ''
value: root.fee_str
TopLabel:
text: _('Outputs') + ':'
OutputList:
height: self.minimum_height
size_hint: 1, None
id: output_list
TopLabel:
text: _('Transaction ID') + ':' if root.tx_hash else ''
TxHashLabel:
data: root.tx_hash
name: _('Transaction ID')
Widget:
size_hint: 1, 0.1
BoxLayout:
size_hint: 1, None
height: '48dp'
Button:
size_hint: 0.5, None
height: '48dp'
text: _('Sign') if root.can_sign else _('Broadcast') if root.can_broadcast else _('Bump fee') if root.can_rbf else ''
disabled: not(root.can_sign or root.can_broadcast or root.can_rbf)
opacity: 0 if self.disabled else 1
on_release:
if root.can_sign: root.do_sign()
if root.can_broadcast: root.do_broadcast()
if root.can_rbf: root.do_rbf()
IconButton:
size_hint: 0.5, None
height: '48dp'
icon: 'atlas://gui/kivy/theming/light/qrcode'
on_release: root.show_qr()
Button:
size_hint: 0.5, None
height: '48dp'
text: _('Close')
on_release: root.dismiss()
''')
class TxDialog(Factory.Popup):
def __init__(self, app, tx):
Factory.Popup.__init__(self)
self.app = app
self.wallet = self.app.wallet
self.tx = tx
def | (self):
self.update()
def update(self):
format_amount = self.app.format_amount_and_units
tx_hash, self.status_str, self.description, self.can_broadcast, self.can_rbf, amount, fee, height, conf, timestamp, exp_n = self.wallet.get_tx_info(self.tx)
self.tx_hash = tx_hash or ''
if timestamp:
self.date_str = datetime.fromtimestamp(timestamp).isoformat(' ')[:-3]
elif exp_n:
self.date_str = _('Within %d blocks') % exp_n if exp_n > 0 else _('unknown (low fee)')
else:
self.date_str = ''
if amount is None:
self.amount_str = _("Transaction unrelated to your wallet")
elif amount > 0:
self.is_mine = False
self.amount_str = format_amount(amount)
else:
self.is_mine = True
self.amount_str = format_amount(-amount)
self.fee_str = format_amount(fee) if fee is not None else _('unknown')
self.can_sign = self.wallet.can_sign(self.tx)
self.ids.output_list.update(self.tx.outputs())
def do_rbf(self):
from bump_fee_dialog import BumpFeeDialog
is_relevant, is_mine, v, fee = self.wallet.get_wallet_delta(self.tx)
size = self.tx.estimated_size()
d = BumpFeeDialog(self.app, fee, size, self._do_rbf)
d.open()
def _do_rbf(self, old_fee, new_fee, is_final):
if new_fee is None:
return
delta = new_fee - old_fee
if delta < 0:
self.app.show_error("fee too low")
return
try:
new_tx = self.wallet.bump_fee(self.tx, delta)
except BaseException as e:
self.app.show_error(e)
return
if is_final:
new_tx.set_sequence(0xffffffff)
self.tx = new_tx
self.update()
self.do_sign()
def do_sign(self):
self.app.protected(_("Enter your PIN code in order to sign this transaction"), self._do_sign, ())
def _do_sign(self, password):
self.status_str = _('Signing') + '...'
Clock.schedule_once(lambda dt: self.__do_sign(password), 0.1)
def __do_sign(self, password):
try:
self.app.wallet.sign_transaction(self.tx, password)
except InvalidPassword:
self.app.show_error(_("Invalid PIN"))
self.update()
def do_broadcast(self):
self.app.broadcast(self.tx)
def show_qr(self):
from electrum.bitcoin import base_encode
text = str(self.tx).decode('hex')
text = base_encode(text, base=43)
self.app.qr_dialog(_("Raw Transaction"), text)
| on_open |
aws.go | // Copyright 2015 The Vanadium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package cloudvm provides functions to test whether the current process is
// running on Google Compute Engine or Amazon Web Services, and to extract
// settings from this environment.
package cloudvm
import (
"context"
"encoding/json"
"fmt"
"io"
"net"
"net/http"
"sync"
"time"
"v.io/v23/logging"
"v.io/x/ref/lib/stats"
"v.io/x/ref/runtime/internal/cloudvm/cloudpaths"
)
var awsHost string = cloudpaths.AWSHost
// SetAWSMetadataHost can be used to override the default metadata host
// for testing purposes.
func SetAWSMetadataHost(host string) {
awsHost = host
}
func awsMetadataHost() string {
return awsHost
}
func awsExternalURL() string {
return awsMetadataHost() + cloudpaths.AWSPublicIPPath
}
func awsInternalURL() string {
return awsMetadataHost() + cloudpaths.AWSPrivateIPPath
}
func awsIdentityDocURL() string {
return awsMetadataHost() + cloudpaths.AWSIdentityDocPath
}
func awsTokenURL() string {
return awsMetadataHost() + cloudpaths.AWSTokenPath
}
const (
// AWSAccountIDStatName is the name of a v.io/x/ref/lib/stats
// string variable containing the account id.
AWSAccountIDStatName = "system/aws/account-id"
// AWSRegionStatName is the name of a v.io/x/ref/lib/stats
// string variable containing the region.
AWSRegionStatName = "system/aws/zone"
)
var (
onceAWS sync.Once
onAWS bool
onIMDSv2 bool
)
// OnAWS returns true if this process is running on Amazon Web Services.
// If true, the stats variables AWSAccountIDStatName and AWSRegionStatName
// are set.
func OnAWS(ctx context.Context, logger logging.Logger, timeout time.Duration) bool {
onceAWS.Do(func() {
onAWS, onIMDSv2 = awsInit(ctx, logger, timeout)
logger.VI(1).Infof("OnAWS: onAWS: %v, onIMDSv2: %v", onAWS, onIMDSv2)
})
return onAWS
}
// AWSPublicAddrs returns the current public IP of this AWS instance.
// Must be called after OnAWS.
func AWSPublicAddrs(ctx context.Context, timeout time.Duration) ([]net.Addr, error) {
return awsGetAddr(ctx, onIMDSv2, awsExternalURL(), timeout)
}
// AWSPrivateAddrs returns the current private Addrs of this AWS instance.
// Must be called after OnAWS.
func AWSPrivateAddrs(ctx context.Context, timeout time.Duration) ([]net.Addr, error) {
return awsGetAddr(ctx, onIMDSv2, awsInternalURL(), timeout)
}
func awsGet(ctx context.Context, imdsv2 bool, url string, timeout time.Duration) ([]byte, error) {
client := &http.Client{Timeout: timeout}
var token string
var err error
if imdsv2 {
token, err = awsSetIMDSv2Token(ctx, awsTokenURL(), timeout)
if err != nil {
return nil, err
}
}
req, err := http.NewRequestWithContext(ctx, "GET", url, nil)
if err != nil {
return nil, err
}
if len(token) > 0 {
req.Header.Add("X-aws-ec2-metadata-token", token)
}
resp, err := client.Do(req)
if err != nil {
return nil, err
}
defer resp.Body.Close()
if resp.StatusCode != 200 {
return nil, fmt.Errorf("HTTP Error: %v %v", url, resp.StatusCode)
}
if server := resp.Header["Server"]; len(server) != 1 || server[0] != "EC2ws" {
return nil, fmt.Errorf("wrong headers")
}
return io.ReadAll(resp.Body)
}
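// NOTE: the instance identity document fetched by awsInit below is JSON of
// roughly this shape (example values); only the "accountId" and "region"
// fields are consumed here, everything else is ignored:
//
//	{
//	  "accountId": "123456789012",
//	  "region":    "us-west-2",
//	  ...
//	}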
// awsInit returns true if it can access AWS project metadata and the version
// of the metadata service it was able to access. It also
// creates two stats variables with the account ID and zone.
func awsInit(ctx context.Context, logger logging.Logger, timeout time.Duration) (bool, bool) {
v2 := false
// Try the v1 service first since it should always work unless v2
// is specifically configured (and hence v1 is disabled), in which
// case the expectation is that it fails fast with a 4xx HTTP error.
body, err := awsGet(ctx, false, awsIdentityDocURL(), timeout)
if err != nil {
logger.VI(1).Infof("failed to access v1 metadata service: %v", err)
// can't access v1, try v2.
body, err = awsGet(ctx, true, awsIdentityDocURL(), timeout)
if err != nil {
logger.VI(1).Infof("failed to access v2 metadata service: %v", err)
return false, false
}
v2 = true
}
doc := map[string]interface{}{}
if err := json.Unmarshal(body, &doc); err != nil {
logger.VI(1).Infof("failed to unmarshal metadata service response: %s: %v", body, err)
return false, false
}
found := 0
for _, v := range []struct {
name, key string
}{
{AWSAccountIDStatName, "accountId"},
{AWSRegionStatName, "region"},
} {
if _, present := doc[v.key]; present {
if val, ok := doc[v.key].(string); ok {
found++
stats.NewString(v.name).Set(val)
}
}
}
return found == 2, v2
}
func awsGetAddr(ctx context.Context, imdsv2 bool, url string, timeout time.Duration) ([]net.Addr, error) |
func awsSetIMDSv2Token(ctx context.Context, url string, timeout time.Duration) (string, error) {
client := &http.Client{Timeout: timeout}
req, err := http.NewRequestWithContext(ctx, "PUT", url, nil)
if err != nil {
return "", err
}
req.Header.Add("X-aws-ec2-metadata-token-ttl-seconds", "60")
resp, err := client.Do(req)
if err != nil {
return "", err
}
defer resp.Body.Close()
if resp.StatusCode != 200 {
return "", err
}
token, err := io.ReadAll(resp.Body)
if err != nil {
return "", err
}
return string(token), nil
}
| {
body, err := awsGet(ctx, imdsv2, url, timeout)
if err != nil {
return nil, err
}
ip := net.ParseIP(string(body))
if len(ip) == 0 {
return nil, nil
}
return []net.Addr{&net.IPAddr{IP: ip}}, nil
} |
formats.rs | use anyhow::{bail, Error as AnyError, Result};
use chrono::prelude::*;
use serde::{Deserialize, Serialize};
use std::str::FromStr;
pub trait InputRecord {
fn get_date(&self) -> &String;
fn get_amount(&self) -> f64;
}
#[derive(Debug, Deserialize)]
pub struct | {
#[serde(rename = "Event ID")]
pub event_id: String,
#[serde(rename = "Date")]
pub date: String,
#[serde(rename = "Block")]
pub block: u64,
#[serde(rename = "Extrinsic Hash")]
pub extrinsic: String,
#[serde(rename = "Value")]
pub amount: f64,
#[serde(rename = "Action")]
pub action: String,
}
impl InputRecord for Subscan {
fn get_date(&self) -> &String {
&self.date
}
fn get_amount(&self) -> f64 {
self.amount
}
}
#[derive(Debug, Deserialize)]
pub struct Kraken {
pub txid: String,
pub refid: String,
#[serde(rename = "time")]
pub date: String,
#[serde(rename = "type")]
pub action: String,
pub aclass: String,
pub asset: String,
pub amount: f64,
pub fee: f64,
}
impl InputRecord for Kraken {
fn get_date(&self) -> &String {
&self.date
}
fn get_amount(&self) -> f64 {
self.amount
}
}
#[derive(Debug, Deserialize)]
pub enum InputFormat {
Subscan,
Kraken,
}
#[derive(Debug)]
pub enum OutputFormat {
BitcoinTax,
}
#[derive(Debug)]
pub enum Quarter {
Q1,
Q2,
Q3,
Q4,
ALL,
}
impl FromStr for Quarter {
type Err = AnyError;
fn from_str(s: &str) -> Result<Self> {
match &s.to_lowercase()[..] {
"q1" | "1" => Ok(Quarter::Q1),
"q2" | "2" => Ok(Quarter::Q2),
"q3" | "3" => Ok(Quarter::Q3),
"q4" | "4" => Ok(Quarter::Q4),
"all" => Ok(Quarter::ALL),
_ => bail!("Invalid quarter!"),
}
}
}
#[derive(Debug, Serialize)]
pub enum OutputRecord {
BT(BitcoinTax),
}
#[derive(Debug, Serialize)]
pub struct BitcoinTax {
date: NaiveDateTime,
action: String,
account: String,
symbol: Coin,
volume: f64,
}
impl BitcoinTax {
pub fn create(date: NaiveDateTime, volume: f64, symbol: Coin) -> Self {
Self {
date,
action: "INCOME".into(),
account: "Polkadot Staking".into(),
symbol,
volume,
}
}
}
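// NOTE: written out with a csv + serde writer, a BitcoinTax record is expected
// to produce a row along the lines of
//   2021-03-01T12:00:00,INCOME,Polkadot Staking,DOT,1.2345
// (example values; the exact date format depends on chrono's serde
// representation of NaiveDateTime and on the writer configuration).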
#[derive(Debug, Serialize)]
enum Currency {
USD,
GBP,
EUR,
}
impl FromStr for Currency {
type Err = AnyError;
fn from_str(s: &str) -> Result<Self> {
match &s.to_lowercase()[..] {
"usd" => Ok(Currency::USD),
"gbp" => Ok(Currency::GBP),
"eur" => Ok(Currency::EUR),
_ => bail!("Invalid currency type!"),
}
}
}
#[derive(Debug, Clone, Copy, PartialEq, Deserialize, Serialize)]
pub enum Coin {
DOT,
KSM,
ATOM,
ETH,
SOL,
KAVA,
ADA,
XTZ,
}
impl FromStr for Coin {
type Err = AnyError;
fn from_str(s: &str) -> Result<Self> {
match &s.to_lowercase()[..] {
"dot" | "dot.s" => Ok(Coin::DOT),
"ksm" | "ksm.s" => Ok(Coin::KSM),
"atom" | "atom.s" => Ok(Coin::ATOM),
"eth" | "eth.s" | "eth2" | "eth2.s" => Ok(Coin::ETH),
"sol" | "sol.s" => Ok(Coin::SOL),
"kava" | "kava.s" => Ok(Coin::KAVA),
"ada" | "ada.s" => Ok(Coin::ADA),
"xtz" | "xtz.s" => Ok(Coin::XTZ),
_ => bail!("Invalid coin type!"),
}
}
}
| Subscan |
token.rs | use std::iter::{
Peekable,
IntoIterator
};
use crate::parser::parse_error::UnexpectedToken;
use nom_locate::LocatedSpan;
pub type Span<'a> = LocatedSpan<&'a str>;
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum Op {
//Usual Arith
Add,
Sub,
Mul,
Div,
Mod,
//Comparison
Eq,
Ne,
Gte,
Lte,
Gt,
Lt,
//Unary
Deref
}
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum OpAssociativity {
Left,
Right
}
impl Op {
pub fn precedence(self) -> i8 {
//Just following C for now, might be tweaks in the future
match self {
Op::Deref => 1,
Op::Mul | Op::Div | Op::Mod => 2,
Op::Add | Op::Sub => 3,
Op::Gt | Op::Lt | Op::Gte | Op::Lte => 4,
Op::Eq | Op::Ne => 5
}
}
pub fn associativity(self) -> OpAssociativity {
//Don't have any right assoc operators yet
OpAssociativity::Left
}
pub fn is_unary(self) -> bool {
match self {
Op::Deref => true,
_ => false
}
}
}
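// NOTE: precedence() uses C-style numbering where *lower* numbers bind tighter
// (Deref = 1 binds tightest, Eq/Ne = 5 loosest), so a consumer should parse
// `a + b * c` as `a + (b * c)`. With Left associativity, operators of equal
// precedence group left-to-right, e.g. `a - b - c` parses as `(a - b) - c`.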
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum TokenValue<'a> {
Operator(Op),
Identifier(&'a str),
ConstInt(i64),
ConstBool(bool),
ConstDouble(f64),
//Key words
Function,
If,
Else,
//Symbols
Comma,
Semicolon,
Colon,
RArrow, //->
LParen, //(
RParen, //)
LCBracket, //{
RCBracket, //}
//Types
Unit,
Int,
Double,
Bool,
//Misc
Comment,
Eof
}
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum TokenType {
Identifier,
Operator,
ConstInt,
ConstBool,
ConstDouble,
//Key words
Function,
If,
Else,
//Symbols
Comma,
Semicolon,
Colon,
RArrow,
LParen, //(
RParen, //)
LBrace, //{
RBrace, //}
//Types
Unit,
Int,
Double,
Bool,
//Misc
Comment,
Eof
}
impl<'a> TokenValue<'a> {
pub fn token_type(&self) -> TokenType {
match self {
TokenValue::Operator(_) => TokenType::Operator,
TokenValue::Identifier(_) => TokenType::Identifier,
TokenValue::ConstInt(_) => TokenType::ConstInt,
TokenValue::ConstBool(_) => TokenType::ConstBool,
TokenValue::ConstDouble(_) => TokenType::ConstDouble,
TokenValue::Function => TokenType::Function,
TokenValue::If => TokenType::If,
TokenValue::Else => TokenType::Else,
TokenValue::Comma => TokenType::Comma,
TokenValue::Semicolon => TokenType::Semicolon,
TokenValue::Colon => TokenType::Colon,
TokenValue::RArrow => TokenType::RArrow,
TokenValue::LParen => TokenType::LParen,
TokenValue::RParen => TokenType::RParen,
TokenValue::LCBracket => TokenType::LBrace,
TokenValue::RCBracket => TokenType::RBrace,
TokenValue::Unit => TokenType::Unit,
TokenValue::Int => TokenType::ConstInt,
TokenValue::Double => TokenType::Double,
TokenValue::Bool => TokenType::Bool,
TokenValue::Comment => TokenType::Comment,
TokenValue::Eof => TokenType::Eof
}
}
}
impl TokenType {
pub fn is_constant(self) -> bool {
match self {
TokenType::ConstInt |
TokenType::ConstBool |
TokenType::ConstDouble |
TokenType::Unit => true,
_ => false
}
}
}
#[derive(Clone, Debug)]
pub struct Token<'a> {
pub pos: Span<'a>,
pub value: TokenValue<'a>
}
impl<'a> Token<'a> {
pub fn operator(op: Op, pos: Span) -> Token {
Token {
pos: pos,
value: TokenValue::Operator(op)
}
}
pub fn identifier(iden: &'a str, pos: Span<'a>) -> Token<'a> {
Token {
pos: pos,
value: TokenValue::Identifier(iden)
}
}
pub fn const_int(value: i64, pos: Span) -> Token {
Token {
pos: pos,
value: TokenValue::ConstInt(value)
}
}
pub fn eof() -> Token<'static> {
Token {
pos: Span::new(""),
value: TokenValue::Eof
}
}
}
pub type TokenResult<'a, T> = Result<T, UnexpectedToken<'a>>;
#[derive(Debug)]
pub struct TokenStream<'a, T: IntoIterator<Item = Token<'a>>> {
iter: Peekable<T::IntoIter>
}
impl<'a, T: IntoIterator<Item = Token<'a>>> TokenStream<'a, T> {
pub fn new(tokens: T) -> TokenStream<'a, T> {
TokenStream {
iter: tokens.into_iter().peekable()
}
}
pub fn peek_token_type(&mut self) -> Option<TokenType> {
while let Some(token_type) = self.iter.peek().map(|token| token.value.token_type()) {
if token_type == TokenType::Comment {
let _ = self.consume();
}
else {
return Some(token_type);
}
}
None
}
pub fn consume(&mut self) -> Option<Token<'a>> {
self.iter.next()
}
pub fn skip_comments(&mut self) {
while let Some(token_type) = self.peek_token_type() {
if token_type == TokenType::Comment {
let _ = self.consume();
}
else {
break;
}
}
}
pub fn | (&mut self) -> TokenResult<'a, Op> {
self.skip_comments();
if let Some(token) = self.iter.next() {
if let Token { value: TokenValue::Operator(op), .. } = token {
Ok(op)
}
else {
Err(UnexpectedToken::single(TokenType::Operator, token))
}
}
else {
Err(UnexpectedToken::single(TokenType::Operator, Token::eof()))
}
}
pub fn consume_op(&mut self) -> (Token<'a>, Op) {
let token = self.iter.next();
if let Some(Token { value: TokenValue::Operator(op), .. }) = token {
(token.unwrap(), op)
}
else {
panic!("consume_op: next token is not an operator")
}
}
pub fn expect_identifier(&mut self) -> TokenResult<'a, &'a str> {
self.skip_comments();
if let Some(token) = self.iter.next() {
if let Token { value: TokenValue::Identifier(iden), .. } = token {
Ok(iden)
}
else {
Err(UnexpectedToken::single(TokenType::Identifier, token))
}
}
else {
Err(UnexpectedToken::single(TokenType::Identifier, Token::eof()))
}
}
pub fn consume_identifier(&mut self) -> (Token<'a>, &'a str) {
let token = self.iter.next();
if let Some(Token { value: TokenValue::Identifier(iden), .. }) = token {
(token.unwrap(), iden)
}
else {
panic!("consume_identifer: next token is not an identifier");
}
}
pub fn expect_int(&mut self) -> TokenResult<'a, i64> {
self.skip_comments();
if let Some(token) = self.iter.next() {
if let Token { value: TokenValue::ConstInt(value), .. } = token {
Ok(value)
}
else {
Err(UnexpectedToken::single(TokenType::ConstInt, token))
}
}
else {
Err(UnexpectedToken::single(TokenType::ConstInt, Token::eof()))
}
}
pub fn consume_int(&mut self) -> (Token<'a>, i64) {
let token = self.iter.next();
if let Some(Token { value: TokenValue::ConstInt(value), .. }) = token {
(token.unwrap(), value)
}
else {
panic!("consume_int: next token is not an int")
}
}
pub fn consume_bool(&mut self) -> (Token<'a>, bool) {
let token = self.iter.next();
if let Some(Token { value: TokenValue::ConstBool(value), .. }) = token {
(token.unwrap(), value)
}
else {
panic!("consume_bool: next token is not a boolean")
}
}
pub fn consume_double(&mut self) -> (Token<'a>, f64) {
let token = self.iter.next();
if let Some(Token { value: TokenValue::ConstDouble(value), .. }) = token {
(token.unwrap(), value)
}
else {
panic!("consume_double: next token is not a double")
}
}
pub fn consume_unit(&mut self) -> Token<'a> {
let token = self.iter.next();
if let Some(Token { value: TokenValue::Unit, .. }) = token {
token.unwrap()
}
else {
panic!("consume_unit: next token is not unit")
}
}
pub fn expect_function(&mut self) -> TokenResult<'a, ()> {
self.skip_comments();
if let Some(token) = self.iter.next() {
match token.value {
TokenValue::Function => Ok(()),
_ => Err(UnexpectedToken::single(TokenType::Function, token))
}
}
else {
Err(UnexpectedToken::single(TokenType::Function, Token::eof()))
}
}
pub fn expect_lparen(&mut self) -> TokenResult<'a, ()> {
self.skip_comments();
if let Some(token) = self.iter.next() {
match token.value {
TokenValue::LParen => Ok(()),
_ => Err(UnexpectedToken::single(TokenType::LParen, token))
}
}
else {
Err(UnexpectedToken::single(TokenType::LParen, Token::eof()))
}
}
pub fn expect_colon(&mut self) -> TokenResult<'a, ()> {
self.skip_comments();
if let Some(token) = self.iter.next() {
match token.value {
TokenValue::Colon => Ok(()),
_ => Err(UnexpectedToken::single(TokenType::Colon, token))
}
}
else {
Err(UnexpectedToken::single(TokenType::Colon, Token::eof()))
}
}
} | expect_op |
airflow_setup.py | #!/usr/bin/env python
import sys, os, pwd, grp, signal, time, base64
from resource_management import *
from resource_management.core.exceptions import Fail
from resource_management.core.logger import Logger
from resource_management.core.resources.system import Execute, Directory, File
from resource_management.core.shell import call
from resource_management.core.system import System
from resource_management.libraries.functions.default import default
def airflow_make_systemd_scripts_webserver(env):
import params
env.set_params(params)
confFileText = format("""[Unit]
Description=Airflow webserver daemon
After=network.target postgresql.service mysql.service redis.service rabbitmq-server.service
Wants=postgresql.service mysql.service redis.service rabbitmq-server.service
[Service]
EnvironmentFile=/etc/sysconfig/airflow
User={airflow_user}
Group={airflow_group}
Type=simple
ExecStart={conda_root}/envs/{conda_airflow_virtualenv}/bin/airflow webserver -D --pid /usr/local/airflow/airflow-webserver.pid --stderr /var/log/airflow/webserver.err --stdout /var/log/airflow/webserver.out -l /var/log/airflow/webserver.log
PIDFile=/usr/local/airflow/airflow-webserver.pid
Restart=always
RestartSec=5s
SyslogIdentifier=airflow-webserver
[Install]
WantedBy=multi-user.target
""")
with open("/etc/systemd/system/multi-user.target.wants/airflow-webserver.service", 'w') as configFile:
configFile.write(confFileText)
configFile.close()
confFileText = format("""AIRFLOW_HOME={airflow_home}
AIRFLOW_CONFIG={airflow_home}/airflow.cfg
PATH={conda_root}/envs/{conda_airflow_virtualenv}/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin
""")
with open("/etc/sysconfig/airflow", 'w') as configFile:
configFile.write(confFileText)
configFile.close()
Execute("systemctl daemon-reload")
def airflow_make_systemd_scripts_scheduler(env):
import params
env.set_params(params)
confFileText = format("""[Unit]
Description=Airflow scheduler daemon
After=network.target postgresql.service mysql.service redis.service rabbitmq-server.service
Wants=postgresql.service mysql.service redis.service rabbitmq-server.service
[Service]
EnvironmentFile=/etc/sysconfig/airflow
User={airflow_user}
Group={airflow_group}
Type=simple
ExecStart={conda_root}/envs/{conda_airflow_virtualenv}/bin/airflow scheduler -D --pid /usr/local/airflow/airflow-scheduler.pid --stderr /var/log/airflow/scheduler.err --stdout /var/log/airflow/scheduler.out -l /var/log/airflow/scheduler.log
PIDFile=/usr/local/airflow/airflow-scheduler.pid
Restart=always
RestartSec=5s
SyslogIdentifier=airflow-scheduler
[Install]
WantedBy=multi-user.target
""")
with open("/etc/systemd/system/multi-user.target.wants/airflow-scheduler.service", 'w') as configFile:
configFile.write(confFileText)
configFile.close()
confFileText = format("""AIRFLOW_HOME={airflow_home}
AIRFLOW_CONFIG={airflow_home}/airflow.cfg
PATH={conda_root}/envs/{conda_airflow_virtualenv}/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin
""")
with open("/etc/sysconfig/airflow", 'w') as configFile:
configFile.write(confFileText)
configFile.close()
Execute("systemctl daemon-reload")
def airflow_make_systemd_scripts_worker(env):
import params
env.set_params(params)
confFileText = format("""[Unit]
Description=Airflow worker daemon
After=network.target postgresql.service mysql.service redis.service rabbitmq-server.service
Wants=postgresql.service mysql.service redis.service rabbitmq-server.service
[Service]
EnvironmentFile=/etc/sysconfig/airflow
User={airflow_user}
Group={airflow_group}
Type=simple
ExecStart={conda_root}/envs/{conda_airflow_virtualenv}/bin/airflow worker -D --pid /usr/local/airflow/airflow-worker.pid --stderr /var/log/airflow/worker.err --stdout /var/log/airflow/worker.out -l /var/log/airflow/worker.log
PIDFile=/usr/local/airflow/airflow-worker.pid
Restart=always
RestartSec=5s
SyslogIdentifier=airflow-worker
[Install]
WantedBy=multi-user.target
""")
with open("/etc/systemd/system/multi-user.target.wants/airflow-worker.service", 'w') as configFile:
configFile.write(confFileText)
configFile.close()
confFileText = format("""AIRFLOW_HOME={airflow_home}
AIRFLOW_CONFIG={airflow_home}/airflow.cfg
PATH={conda_root}/envs/{conda_airflow_virtualenv}/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin
""")
with open("/etc/sysconfig/airflow", 'w') as configFile:
configFile.write(confFileText)
configFile.close()
Execute("systemctl daemon-reload")
def airflow_generate_config_for_section(sections):
"""
Generate the values for each section of airflow.cfg.
This allows custom-site configuration from Ambari to be added to the cfg file.
"""
result = {}
for section, data in sections.items():
section_config = ""
for key, value in data.items():
section_config += format("{key} = {value}\n")
result[section] = section_config
return result
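# Example (hypothetical values): given
#   {"core": {"dags_folder": "/usr/local/airflow/dags", "executor": "CeleryExecutor"}}
# this returns
#   {"core": "dags_folder = /usr/local/airflow/dags\nexecutor = CeleryExecutor\n"},
# which the caller below wraps in "[core]\n..." section headers when writing airflow.cfg.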
def | (env):
import params
env.set_params(params)
airflow_config_file = ""
airflow_config = airflow_generate_config_for_section({
"core" : params.config['configurations']['airflow-core-site'],
"cli" : params.config['configurations']['airflow-cli-site'],
"api" : params.config['configurations']['airflow-api-site'],
"operators" : params.config['configurations']['airflow-operators-site'],
"webserver" : params.config['configurations']['airflow-webserver-site'],
"email" : params.config['configurations']['airflow-email-site'],
"smtp" : params.config['configurations']['airflow-smtp-site'],
"celery" : params.config['configurations']['airflow-celery-site'],
"dask" : params.config['configurations']['airflow-dask-site'],
"scheduler" : params.config['configurations']['airflow-scheduler-site'],
"ldap" : params.config['configurations']['airflow-ldap-site'],
"mesos" : params.config['configurations']['airflow-mesos-site'],
"kerberos" : params.config['configurations']['airflow-kerberos-site'],
"github_enterprise" : params.config['configurations']['airflow-githubenterprise-site'],
"admin" : params.config['configurations']['airflow-admin-site'],
"lineage" : params.config['configurations']['airflow-lineage-site'],
"atlas" : params.config['configurations']['airflow-atlas-site'],
"hive" : params.config['configurations']['airflow-hive-site'],
"celery_broker_transport_options" : params.config['configurations']['airflow-celerybrokertransportoptions-site'],
"elasticsearch" : params.config['configurations']['airflow-elasticsearch-site'],
"kubernetes" : params.config['configurations']['airflow-kubernetes-site'],
"kubernetes_secrets" : params.config['configurations']['airflow-kubernetessecrets-site']
})
for section, value in airflow_config.items():
airflow_config_file += format("[{section}]\n{value}\n")
with open(params.airflow_home + "/airflow.cfg", 'w') as configFile:
configFile.write(airflow_config_file)
configFile.close()
| airflow_configure |
merge_shuff_split.py | '''
This file
1. Reads in data from <data_dir>/<subfolder_name>
2. Merges ALL data together, shuffles the lines, and outputs the data in small chunks into <data_dir>/<subfolder_name>/<train/dev/test>
After this, run generate_slices.py
Example run command:
'''
import argparse
import collections
import copy
import hashlib
import os
import random
import shutil
import time
import ujson as json
from tqdm import tqdm
import bootleg_data_prep.utils.utils as utils
import bootleg_data_prep.utils.data_prep_utils as prep_utils
from bootleg_data_prep.language import ENSURE_ASCII
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('--split', type=int, default=2, choices=range(1, 100), help='Percent for test and dev data; rest goes to train')
parser.add_argument('--data_dir', type=str, default='data/wiki_dump', help='Directory for data is.')
parser.add_argument('--subfolder_name', type=str, default="filtered_data")
parser.add_argument('--without_weak_label', action='store_true', help='Whether to remove non-golds for training data or not. By default they are kept in.')
parser.add_argument('--hash_keys', nargs="+", default=['parent_qid', 'parent_title'], help='What keys should be use to shuffle the data')
parser.add_argument('--sentence_split', action='store_true', help='Whether to make the hash key be based on sentence as well as page')
parser.add_argument('--bytes', type=str, default='10M')
parser.add_argument('--seed', type=int, default=0)
args = parser.parse_args()
return args
def my_hash(key):
return int(hashlib.sha1(key).hexdigest(), 16)
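# NOTE: hashing the configured --hash_keys (parent_qid and parent_title by default)
# and taking the digest mod 100 assigns every sentence of a page to the same bucket,
# so the train/dev/test split is deterministic and pages are not shared across folds.
# With --sentence_split the sentence index is added to the key, so sentences from one
# page may land in different folds. For example, with --split 2: buckets 0-1 -> test,
# 2-3 -> dev, 4-99 -> train.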
def | (sent):
# Remove all aliases that are not golds!
new_sent = copy.deepcopy(sent)
new_sent['aliases'] = []
new_sent['char_spans'] = []
new_sent['qids'] = []
new_sent['gold'] = []
removed = 0
for i in range(len(sent['gold'])):
if sent['gold'][i] is True:
new_sent['aliases'].append(sent['aliases'][i])
new_sent['char_spans'].append(sent['char_spans'][i])
new_sent['qids'].append(sent['qids'][i])
new_sent['gold'].append(sent['gold'][i])
else:
removed += 1
return new_sent, removed
def unswap_gold_aliases(sent):
# Return the aliases to the original one for gold aliases (we swap for training to increase conflict)
for i in range(len(sent['gold'])):
if sent['gold'][i] is True:
if sent.get('unswap_aliases', 'aliases')[i] != sent['aliases'][i]:
sent['aliases'][i] = sent.get('unswap_aliases', 'aliases')[i]
return sent
def main():
gl_start = time.time()
args = parse_args()
print(json.dumps(vars(args), indent=4))
random.seed(args.seed)
load_dir = os.path.join(args.data_dir, args.subfolder_name)
print(f"Loading data from {load_dir}...")
files = prep_utils.glob_files(f"{load_dir}/*")
print(f"Loaded {len(files)} files.")
out_dir = os.path.join(args.data_dir, f"{args.subfolder_name}")
vars(args)['saved_data_folder'] = out_dir
# Setup stats dir
stats_dir = os.path.join(out_dir, f"stats")
out_file_with = os.path.join(stats_dir, "alias_qid_traindata_withaugment.json")
out_file_without = os.path.join(stats_dir, "alias_qid_traindata_withoutaugment.json")
train_qidcnt_file = os.path.join(out_dir, "qid2cnt.json")
print(f"Will output to {out_file_with} and {out_file_without} and counts to {train_qidcnt_file}")
alias_qid_with = collections.defaultdict(lambda: collections.defaultdict(int))
alias_qid_without = collections.defaultdict(lambda: collections.defaultdict(int))
vars(args)['stats_dir'] = stats_dir
prep_utils.save_config(args, "split_config.json")
# Store the current file handle
# Make the train, dev, and test subfolders
# Get train, dev, test slits
counters = {}
splits = {}
for fold in ["train", "dev", "test"]:
key = os.path.join(out_dir, fold)
if os.path.exists(key):
shutil.rmtree(key)
utils.ensure_dir(key)
counters[key] = tuple([0, open(os.path.join(key, f"out_0.jsonl"), "w", encoding='utf8')])
if fold == "train":
splits[key] = list(range(2*args.split, 100))
elif fold == "dev":
splits[key] = list(range(args.split, 2*args.split))
elif fold == "test":
splits[key] = list(range(0, args.split))
# Get file size for splits
power = 'kmg'.find(args.bytes[-1].lower()) + 1
file_size = int(args.bytes[:-1]) * 1024 ** power
random.seed(args.seed)
# Read in all data, shuffle, and split
start = time.time()
lines = []
for file in tqdm(sorted(files)):
with open(file, "r", encoding="utf-8") as in_f:
for line in in_f:
line_strip = json.loads(line.strip())
hash_keys_for_item = []
for key in args.hash_keys:
hash_keys_for_item.append(line_strip[key])
if args.sentence_split:
hash_keys_for_item = [line_strip["doc_sent_idx"]] + hash_keys_for_item
key = str(tuple(hash_keys_for_item)).encode('utf-8')
lines.append([my_hash(key), line])
print(f"Read data in {time.time() - start} seconds.")
start = time.time()
random.shuffle(lines)
print(f"Shuffled in {time.time() - start} seconds.")
    # If the data is of different lengths, this will reset the random state here
print(f"Starting to write out {len(lines)} lines")
start = time.time()
trainqid2cnt = collections.defaultdict(int)
line_idx = 0
total_removed = 0
for hash_key, line in tqdm(lines):
line = json.loads(line.strip())
spl = hash_key % 100
for key in splits:
if spl in splits[key]:
                # if weak labeling is disabled, keep only gold aliases (applies to every split)
if (args.without_weak_label):
line, removed = keep_only_gold_aliases(line)
total_removed += removed
# update train stats
if (key == os.path.join(out_dir, "train")):
for alias, qid, gold in zip(line["aliases"], line["qids"], line["gold"]):
trainqid2cnt[qid] += 1
alias_qid_with[alias][qid] += 1
if gold:
alias_qid_without[alias][qid] += 1
# use unswapped GOLD aliases for test/dev
if (key != os.path.join(out_dir, "train")):
line = unswap_gold_aliases(line)
if len(line['aliases']) == 0:
continue
(idx, out_f) = counters[key]
if out_f.tell() > file_size:
out_f.close()
idx += 1
out_f = open(os.path.join(key, f"out_{idx}.jsonl"), "w", encoding='utf8')
counters[key] = tuple([idx, out_f])
line["sent_idx_unq"] = line_idx
line_idx += 1
out_f.write(json.dumps(line, sort_keys=True, ensure_ascii=ENSURE_ASCII) + "\n")
utils.dump_json_file(out_file_with, alias_qid_with)
utils.dump_json_file(out_file_without, alias_qid_without)
utils.dump_json_file(train_qidcnt_file, trainqid2cnt)
print(f"Finished writing files in {time.time() - start} seconds. Removed {total_removed} non-gold aliases from dev and test and train.")
# Closing files
for key in tqdm(splits):
counters[key][1].close()
print(f"Close files")
print(f"Finished merge_shuff_split in {time.time() - gl_start} seconds.")
if __name__ == '__main__':
    main()
astencode.rs | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(non_camel_case_types)]
// FIXME: remove this after snapshot, and Results are handled
#![allow(unused_must_use)]
use metadata::common as c;
use metadata::cstore as cstore;
use session::Session;
use metadata::decoder;
use middle::def;
use metadata::encoder as e;
use middle::region;
use metadata::tydecode;
use metadata::tydecode::{DefIdSource, NominalType, TypeWithId, TypeParameter};
use metadata::tydecode::{RegionParameter, UnboxedClosureSource};
use metadata::tyencode;
use middle::mem_categorization::Typer;
use middle::subst;
use middle::subst::VecPerParamSpace;
use middle::typeck::{mod, MethodCall, MethodCallee, MethodOrigin};
use middle::ty::{mod, Ty};
use util::ppaux::ty_to_string;
use syntax::{ast, ast_map, ast_util, codemap, fold};
use syntax::ast_util::PostExpansionMethod;
use syntax::codemap::Span;
use syntax::fold::Folder;
use syntax::parse::token;
use syntax::ptr::P;
use syntax;
use std::io::Seek;
use std::rc::Rc;
use rbml::io::SeekableMemWriter;
use rbml::{reader, writer};
use rbml;
use serialize;
use serialize::{Decodable, Decoder, DecoderHelpers, Encodable};
use serialize::{EncoderHelpers};
#[cfg(test)] use syntax::parse;
#[cfg(test)] use syntax::print::pprust;
struct DecodeContext<'a, 'b, 'tcx: 'a> {
tcx: &'a ty::ctxt<'tcx>,
cdata: &'b cstore::crate_metadata,
from_id_range: ast_util::IdRange,
to_id_range: ast_util::IdRange
}
trait tr {
fn tr(&self, dcx: &DecodeContext) -> Self;
}
trait tr_intern {
fn tr_intern(&self, dcx: &DecodeContext) -> ast::DefId;
}
pub type Encoder<'a> = writer::Encoder<'a, SeekableMemWriter>;
// ______________________________________________________________________
// Top-level methods.
pub fn encode_inlined_item(ecx: &e::EncodeContext,
rbml_w: &mut Encoder,
ii: e::InlinedItemRef) {
let id = match ii {
e::IIItemRef(i) => i.id,
e::IIForeignRef(i) => i.id,
e::IITraitItemRef(_, &ast::ProvidedMethod(ref m)) => m.id,
e::IITraitItemRef(_, &ast::RequiredMethod(ref m)) => m.id,
e::IITraitItemRef(_, &ast::TypeTraitItem(ref ti)) => ti.ty_param.id,
e::IIImplItemRef(_, &ast::MethodImplItem(ref m)) => m.id,
e::IIImplItemRef(_, &ast::TypeImplItem(ref ti)) => ti.id,
};
debug!("> Encoding inlined item: {} ({})",
ecx.tcx.map.path_to_string(id),
rbml_w.writer.tell());
// Folding could be avoided with a smarter encoder.
let ii = simplify_ast(ii);
let id_range = ast_util::compute_id_range_for_inlined_item(&ii);
rbml_w.start_tag(c::tag_ast as uint);
id_range.encode(rbml_w);
encode_ast(rbml_w, &ii);
encode_side_tables_for_ii(ecx, rbml_w, &ii);
rbml_w.end_tag();
debug!("< Encoded inlined fn: {} ({})",
ecx.tcx.map.path_to_string(id),
rbml_w.writer.tell());
}
impl<'a, 'b, 'c, 'tcx> ast_map::FoldOps for &'a DecodeContext<'b, 'c, 'tcx> {
fn new_id(&self, id: ast::NodeId) -> ast::NodeId {
if id == ast::DUMMY_NODE_ID {
// Used by ast_map to map the NodeInlinedParent.
self.tcx.sess.next_node_id()
} else {
self.tr_id(id)
}
}
fn new_def_id(&self, def_id: ast::DefId) -> ast::DefId {
self.tr_def_id(def_id)
}
fn new_span(&self, span: Span) -> Span {
self.tr_span(span)
}
}
pub fn decode_inlined_item<'tcx>(cdata: &cstore::crate_metadata,
tcx: &ty::ctxt<'tcx>,
path: Vec<ast_map::PathElem>,
par_doc: rbml::Doc)
-> Result<&'tcx ast::InlinedItem, Vec<ast_map::PathElem>> {
match par_doc.opt_child(c::tag_ast) {
None => Err(path),
Some(ast_doc) => {
let mut path_as_str = None;
debug!("> Decoding inlined fn: {}::?",
{
// Do an Option dance to use the path after it is moved below.
let s = ast_map::path_to_string(ast_map::Values(path.iter()));
path_as_str = Some(s);
path_as_str.as_ref().map(|x| x.as_slice())
});
let mut ast_dsr = reader::Decoder::new(ast_doc);
let from_id_range = Decodable::decode(&mut ast_dsr).unwrap();
let to_id_range = reserve_id_range(&tcx.sess, from_id_range);
let dcx = &DecodeContext {
cdata: cdata,
tcx: tcx,
from_id_range: from_id_range,
to_id_range: to_id_range
};
let raw_ii = decode_ast(ast_doc);
let ii = ast_map::map_decoded_item(&dcx.tcx.map, path, raw_ii, dcx);
let ident = match *ii {
ast::IIItem(ref i) => i.ident,
ast::IIForeign(ref i) => i.ident,
ast::IITraitItem(_, ref ti) => {
match *ti {
ast::ProvidedMethod(ref m) => m.pe_ident(),
ast::RequiredMethod(ref ty_m) => ty_m.ident,
ast::TypeTraitItem(ref ti) => ti.ty_param.ident,
}
},
ast::IIImplItem(_, ref m) => {
match *m {
ast::MethodImplItem(ref m) => m.pe_ident(),
ast::TypeImplItem(ref ti) => ti.ident,
}
}
};
debug!("Fn named: {}", token::get_ident(ident));
debug!("< Decoded inlined fn: {}::{}",
path_as_str.unwrap(),
token::get_ident(ident));
region::resolve_inlined_item(&tcx.sess, &tcx.region_maps, ii);
decode_side_tables(dcx, ast_doc);
match *ii {
ast::IIItem(ref i) => {
debug!(">>> DECODED ITEM >>>\n{}\n<<< DECODED ITEM <<<",
syntax::print::pprust::item_to_string(&**i));
}
_ => { }
}
Ok(ii)
}
}
}
// ______________________________________________________________________
// Enumerating the IDs which appear in an AST
fn reserve_id_range(sess: &Session,
from_id_range: ast_util::IdRange) -> ast_util::IdRange {
// Handle the case of an empty range:
if from_id_range.empty() { return from_id_range; }
let cnt = from_id_range.max - from_id_range.min;
let to_id_min = sess.reserve_node_ids(cnt);
let to_id_max = to_id_min + cnt;
ast_util::IdRange { min: to_id_min, max: to_id_max }
}
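// Worked example of the renumbering (the numbers are made up): if the encoded
// item used node ids in from_id_range = [40, 45) and the session reserves 900
// as the next free id, reserve_id_range returns [900, 905) and tr_id below
// maps an encoded id such as 42 to 42 - 40 + 900 = 902, keeping the inlined
// ids disjoint from every id already present in this crate.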
impl<'a, 'b, 'tcx> DecodeContext<'a, 'b, 'tcx> {
pub fn tr_id(&self, id: ast::NodeId) -> ast::NodeId {
/*!
* Translates an internal id, meaning a node id that is known
* to refer to some part of the item currently being inlined,
* such as a local variable or argument. All naked node-ids
* that appear in types have this property, since if something
* might refer to an external item we would use a def-id to
* allow for the possibility that the item resides in another
* crate.
*/
// from_id_range should be non-empty
assert!(!self.from_id_range.empty());
(id - self.from_id_range.min + self.to_id_range.min)
}
pub fn tr_def_id(&self, did: ast::DefId) -> ast::DefId {
/*!
* Translates an EXTERNAL def-id, converting the crate number
* from the one used in the encoded data to the current crate
         * numbers. By external, I mean that it should be translated to a
* reference to the item in its original crate, as opposed to
* being translated to a reference to the inlined version of
* the item. This is typically, but not always, what you
* want, because most def-ids refer to external things like
* types or other fns that may or may not be inlined. Note
* that even when the inlined function is referencing itself
* recursively, we would want `tr_def_id` for that
* reference--- conceptually the function calls the original,
* non-inlined version, and trans deals with linking that
* recursive call to the inlined copy.
*
* However, there are a *few* cases where def-ids are used but
* we know that the thing being referenced is in fact *internal*
* to the item being inlined. In those cases, you should use
* `tr_intern_def_id()` below.
*/
decoder::translate_def_id(self.cdata, did)
}
pub fn tr_intern_def_id(&self, did: ast::DefId) -> ast::DefId {
/*!
* Translates an INTERNAL def-id, meaning a def-id that is
* known to refer to some part of the item currently being
* inlined. In that case, we want to convert the def-id to
* refer to the current crate and to the new, inlined node-id.
*/
assert_eq!(did.krate, ast::LOCAL_CRATE);
ast::DefId { krate: ast::LOCAL_CRATE, node: self.tr_id(did.node) }
}
pub fn tr_span(&self, _span: Span) -> Span {
codemap::DUMMY_SP // FIXME (#1972): handle span properly
}
}
impl tr_intern for ast::DefId {
fn tr_intern(&self, dcx: &DecodeContext) -> ast::DefId {
dcx.tr_intern_def_id(*self)
}
}
impl tr for ast::DefId {
fn tr(&self, dcx: &DecodeContext) -> ast::DefId {
dcx.tr_def_id(*self)
}
}
impl tr for Option<ast::DefId> {
fn tr(&self, dcx: &DecodeContext) -> Option<ast::DefId> {
self.map(|d| dcx.tr_def_id(d))
}
}
impl tr for Span {
fn tr(&self, dcx: &DecodeContext) -> Span {
dcx.tr_span(*self)
}
}
trait def_id_encoder_helpers {
fn emit_def_id(&mut self, did: ast::DefId);
}
impl<S:serialize::Encoder<E>, E> def_id_encoder_helpers for S {
fn emit_def_id(&mut self, did: ast::DefId) {
did.encode(self).ok().unwrap()
}
}
trait def_id_decoder_helpers {
fn read_def_id(&mut self, dcx: &DecodeContext) -> ast::DefId;
fn read_def_id_nodcx(&mut self,
cdata: &cstore::crate_metadata) -> ast::DefId;
}
impl<D:serialize::Decoder<E>, E> def_id_decoder_helpers for D {
fn read_def_id(&mut self, dcx: &DecodeContext) -> ast::DefId {
let did: ast::DefId = Decodable::decode(self).ok().unwrap();
did.tr(dcx)
}
fn read_def_id_nodcx(&mut self,
cdata: &cstore::crate_metadata) -> ast::DefId {
let did: ast::DefId = Decodable::decode(self).ok().unwrap();
decoder::translate_def_id(cdata, did)
}
}
// ______________________________________________________________________
// Encoding and decoding the AST itself
//
// The hard work is done by an autogenerated module astencode_gen. To
// regenerate astencode_gen, run src/etc/gen-astencode. It will
// replace astencode_gen with a dummy file and regenerate its
// contents. If you get compile errors, the dummy file
// remains---resolve the errors and then rerun astencode_gen.
// Annoying, I know, but hopefully only temporary.
//
// When decoding, we have to renumber the AST so that the node ids that
// appear within are disjoint from the node ids in our existing ASTs.
// We also have to adjust the spans: for now we just insert a dummy span,
// but eventually we should add entries to the local codemap as required.
fn encode_ast(rbml_w: &mut Encoder, item: &ast::InlinedItem) {
rbml_w.start_tag(c::tag_tree as uint);
item.encode(rbml_w);
rbml_w.end_tag();
}
struct NestedItemsDropper;
impl Folder for NestedItemsDropper {
fn fold_block(&mut self, blk: P<ast::Block>) -> P<ast::Block> {
blk.and_then(|ast::Block {id, stmts, expr, rules, span, ..}| {
let stmts_sans_items = stmts.into_iter().filter_map(|stmt| {
let use_stmt = match stmt.node {
ast::StmtExpr(_, _) | ast::StmtSemi(_, _) => true,
ast::StmtDecl(ref decl, _) => {
match decl.node {
ast::DeclLocal(_) => true,
ast::DeclItem(_) => false,
}
}
ast::StmtMac(..) => panic!("unexpanded macro in astencode")
};
if use_stmt {
Some(stmt)
} else {
None
}
}).collect();
let blk_sans_items = P(ast::Block {
view_items: Vec::new(), // I don't know if we need the view_items
// here, but it doesn't break tests!
stmts: stmts_sans_items,
expr: expr,
id: id,
rules: rules,
span: span,
});
fold::noop_fold_block(blk_sans_items, self)
})
}
}
// Produces a simplified copy of the AST which does not include things
// that we do not need to or do not want to export. For example, we
// do not include any nested items: if these nested items are to be
// inlined, their AST will be exported separately (this only makes
// sense because, in Rust, nested items are independent except for
// their visibility).
//
// As it happens, trans relies on the fact that we do not export
// nested items, as otherwise it would get confused when translating
// inlined items.
fn simplify_ast(ii: e::InlinedItemRef) -> ast::InlinedItem {
let mut fld = NestedItemsDropper;
match ii {
// HACK we're not dropping items.
e::IIItemRef(i) => {
ast::IIItem(fold::noop_fold_item(P(i.clone()), &mut fld)
.expect_one("expected one item"))
}
e::IITraitItemRef(d, ti) => {
ast::IITraitItem(d, match *ti {
ast::ProvidedMethod(ref m) => {
ast::ProvidedMethod(
fold::noop_fold_method(m.clone(), &mut fld)
.expect_one("noop_fold_method must produce \
exactly one method"))
}
ast::RequiredMethod(ref ty_m) => {
ast::RequiredMethod(
                        fold::noop_fold_type_method(ty_m.clone(), &mut fld))
                }
ast::TypeTraitItem(ref associated_type) => {
ast::TypeTraitItem(
P(fold::noop_fold_associated_type(
(**associated_type).clone(),
&mut fld)))
}
})
}
e::IIImplItemRef(d, m) => {
ast::IIImplItem(d, match *m {
ast::MethodImplItem(ref m) => {
ast::MethodImplItem(
fold::noop_fold_method(m.clone(), &mut fld)
.expect_one("noop_fold_method must produce \
exactly one method"))
}
ast::TypeImplItem(ref td) => {
ast::TypeImplItem(
P(fold::noop_fold_typedef((**td).clone(), &mut fld)))
}
})
}
e::IIForeignRef(i) => {
ast::IIForeign(fold::noop_fold_foreign_item(P(i.clone()), &mut fld))
}
}
}
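// Small illustration of the dropping above (it mirrors test_simplification at
// the bottom of this file): inlining something like
//     fn outer() { fn nested(a: int) -> int { a } body() }
// exports an AST equivalent to
//     fn outer() { body() }
// where `body()` is a stand-in; the nested item, if it is ever needed, gets
// inlined later from its own metadata entry.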
fn decode_ast(par_doc: rbml::Doc) -> ast::InlinedItem {
let chi_doc = par_doc.get(c::tag_tree as uint);
let mut d = reader::Decoder::new(chi_doc);
Decodable::decode(&mut d).unwrap()
}
// ______________________________________________________________________
// Encoding and decoding of ast::def
fn decode_def(dcx: &DecodeContext, doc: rbml::Doc) -> def::Def {
let mut dsr = reader::Decoder::new(doc);
let def: def::Def = Decodable::decode(&mut dsr).unwrap();
def.tr(dcx)
}
impl tr for def::Def {
fn tr(&self, dcx: &DecodeContext) -> def::Def {
match *self {
def::DefFn(did, is_ctor) => def::DefFn(did.tr(dcx), is_ctor),
def::DefStaticMethod(did, p) => {
def::DefStaticMethod(did.tr(dcx), p.map(|did2| did2.tr(dcx)))
}
def::DefMethod(did0, did1, p) => {
def::DefMethod(did0.tr(dcx),
did1.map(|did1| did1.tr(dcx)),
p.map(|did2| did2.tr(dcx)))
}
def::DefSelfTy(nid) => { def::DefSelfTy(dcx.tr_id(nid)) }
def::DefMod(did) => { def::DefMod(did.tr(dcx)) }
def::DefForeignMod(did) => { def::DefForeignMod(did.tr(dcx)) }
def::DefStatic(did, m) => { def::DefStatic(did.tr(dcx), m) }
def::DefConst(did) => { def::DefConst(did.tr(dcx)) }
def::DefLocal(nid) => { def::DefLocal(dcx.tr_id(nid)) }
def::DefVariant(e_did, v_did, is_s) => {
def::DefVariant(e_did.tr(dcx), v_did.tr(dcx), is_s)
},
def::DefTrait(did) => def::DefTrait(did.tr(dcx)),
def::DefTy(did, is_enum) => def::DefTy(did.tr(dcx), is_enum),
def::DefAssociatedTy(did) => def::DefAssociatedTy(did.tr(dcx)),
def::DefPrimTy(p) => def::DefPrimTy(p),
def::DefTyParam(s, did, v) => def::DefTyParam(s, did.tr(dcx), v),
def::DefUse(did) => def::DefUse(did.tr(dcx)),
def::DefUpvar(nid1, nid2, nid3) => {
def::DefUpvar(dcx.tr_id(nid1),
dcx.tr_id(nid2),
dcx.tr_id(nid3))
}
def::DefStruct(did) => def::DefStruct(did.tr(dcx)),
def::DefRegion(nid) => def::DefRegion(dcx.tr_id(nid)),
def::DefTyParamBinder(nid) => {
def::DefTyParamBinder(dcx.tr_id(nid))
}
def::DefLabel(nid) => def::DefLabel(dcx.tr_id(nid))
}
}
}
// ______________________________________________________________________
// Encoding and decoding of ancillary information
impl tr for ty::Region {
fn tr(&self, dcx: &DecodeContext) -> ty::Region {
match *self {
ty::ReLateBound(debruijn, br) => {
ty::ReLateBound(debruijn, br.tr(dcx))
}
ty::ReEarlyBound(id, space, index, ident) => {
ty::ReEarlyBound(dcx.tr_id(id), space, index, ident)
}
ty::ReScope(scope) => {
ty::ReScope(scope.tr(dcx))
}
ty::ReEmpty | ty::ReStatic | ty::ReInfer(..) => {
*self
}
ty::ReFree(ref fr) => {
ty::ReFree(fr.tr(dcx))
}
}
}
}
impl tr for ty::FreeRegion {
fn tr(&self, dcx: &DecodeContext) -> ty::FreeRegion {
ty::FreeRegion { scope: self.scope.tr(dcx),
bound_region: self.bound_region.tr(dcx) }
}
}
impl tr for region::CodeExtent {
fn tr(&self, dcx: &DecodeContext) -> region::CodeExtent {
self.map_id(|id| dcx.tr_id(id))
}
}
impl tr for ty::BoundRegion {
fn tr(&self, dcx: &DecodeContext) -> ty::BoundRegion {
match *self {
ty::BrAnon(_) |
ty::BrFresh(_) |
ty::BrEnv => *self,
ty::BrNamed(id, ident) => ty::BrNamed(dcx.tr_def_id(id),
ident),
}
}
}
impl tr for ty::TraitStore {
fn tr(&self, dcx: &DecodeContext) -> ty::TraitStore {
match *self {
ty::RegionTraitStore(r, m) => {
ty::RegionTraitStore(r.tr(dcx), m)
}
ty::UniqTraitStore => ty::UniqTraitStore
}
}
}
// ______________________________________________________________________
// Encoding and decoding of freevar information
fn encode_freevar_entry(rbml_w: &mut Encoder, fv: &ty::Freevar) {
(*fv).encode(rbml_w).unwrap();
}
fn encode_capture_mode(rbml_w: &mut Encoder, cm: ast::CaptureClause) {
cm.encode(rbml_w).unwrap();
}
trait rbml_decoder_helper {
fn read_freevar_entry(&mut self, dcx: &DecodeContext)
-> ty::Freevar;
fn read_capture_mode(&mut self) -> ast::CaptureClause;
}
impl<'a> rbml_decoder_helper for reader::Decoder<'a> {
fn read_freevar_entry(&mut self, dcx: &DecodeContext)
-> ty::Freevar {
let fv: ty::Freevar = Decodable::decode(self).unwrap();
fv.tr(dcx)
}
fn read_capture_mode(&mut self) -> ast::CaptureClause {
let cm: ast::CaptureClause = Decodable::decode(self).unwrap();
cm
}
}
impl tr for ty::Freevar {
fn tr(&self, dcx: &DecodeContext) -> ty::Freevar {
ty::Freevar {
def: self.def.tr(dcx),
span: self.span.tr(dcx),
}
}
}
impl tr for ty::UpvarBorrow {
fn tr(&self, dcx: &DecodeContext) -> ty::UpvarBorrow {
ty::UpvarBorrow {
kind: self.kind,
region: self.region.tr(dcx)
}
}
}
// ______________________________________________________________________
// Encoding and decoding of MethodCallee
trait read_method_callee_helper<'tcx> {
fn read_method_callee<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
-> (typeck::ExprAdjustment, MethodCallee<'tcx>);
}
fn encode_method_callee<'a, 'tcx>(ecx: &e::EncodeContext<'a, 'tcx>,
rbml_w: &mut Encoder,
adjustment: typeck::ExprAdjustment,
method: &MethodCallee<'tcx>) {
use serialize::Encoder;
rbml_w.emit_struct("MethodCallee", 4, |rbml_w| {
rbml_w.emit_struct_field("adjustment", 0u, |rbml_w| {
adjustment.encode(rbml_w)
});
rbml_w.emit_struct_field("origin", 1u, |rbml_w| {
Ok(rbml_w.emit_method_origin(ecx, &method.origin))
});
rbml_w.emit_struct_field("ty", 2u, |rbml_w| {
Ok(rbml_w.emit_ty(ecx, method.ty))
});
rbml_w.emit_struct_field("substs", 3u, |rbml_w| {
Ok(rbml_w.emit_substs(ecx, &method.substs))
})
}).unwrap();
}
impl<'a, 'tcx> read_method_callee_helper<'tcx> for reader::Decoder<'a> {
fn read_method_callee<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
-> (typeck::ExprAdjustment, MethodCallee<'tcx>) {
self.read_struct("MethodCallee", 4, |this| {
let adjustment = this.read_struct_field("adjustment", 0, |this| {
Decodable::decode(this)
}).unwrap();
Ok((adjustment, MethodCallee {
origin: this.read_struct_field("origin", 1, |this| {
Ok(this.read_method_origin(dcx))
}).unwrap(),
ty: this.read_struct_field("ty", 2, |this| {
Ok(this.read_ty(dcx))
}).unwrap(),
substs: this.read_struct_field("substs", 3, |this| {
Ok(this.read_substs(dcx))
}).unwrap()
}))
}).unwrap()
}
}
impl<'tcx> tr for MethodOrigin<'tcx> {
fn tr(&self, dcx: &DecodeContext) -> MethodOrigin<'tcx> {
match *self {
typeck::MethodStatic(did) => typeck::MethodStatic(did.tr(dcx)),
typeck::MethodStaticUnboxedClosure(did) => {
typeck::MethodStaticUnboxedClosure(did.tr(dcx))
}
typeck::MethodTypeParam(ref mp) => {
typeck::MethodTypeParam(
typeck::MethodParam {
// def-id is already translated when we read it out
trait_ref: mp.trait_ref.clone(),
method_num: mp.method_num,
}
)
}
typeck::MethodTraitObject(ref mo) => {
typeck::MethodTraitObject(
typeck::MethodObject {
trait_ref: mo.trait_ref.clone(),
.. *mo
}
)
}
}
}
}
pub fn encode_unboxed_closure_kind(ebml_w: &mut Encoder,
kind: ty::UnboxedClosureKind) {
use serialize::Encoder;
ebml_w.emit_enum("UnboxedClosureKind", |ebml_w| {
match kind {
ty::FnUnboxedClosureKind => {
ebml_w.emit_enum_variant("FnUnboxedClosureKind", 0, 3, |_| {
Ok(())
})
}
ty::FnMutUnboxedClosureKind => {
ebml_w.emit_enum_variant("FnMutUnboxedClosureKind", 1, 3, |_| {
Ok(())
})
}
ty::FnOnceUnboxedClosureKind => {
ebml_w.emit_enum_variant("FnOnceUnboxedClosureKind",
2,
3,
|_| {
Ok(())
})
}
}
}).unwrap()
}
pub trait vtable_decoder_helpers<'tcx> {
fn read_vec_per_param_space<T>(&mut self,
f: |&mut Self| -> T)
-> VecPerParamSpace<T>;
fn read_vtable_res_with_key(&mut self,
tcx: &ty::ctxt<'tcx>,
cdata: &cstore::crate_metadata)
-> (typeck::ExprAdjustment, typeck::vtable_res<'tcx>);
fn read_vtable_res(&mut self,
tcx: &ty::ctxt<'tcx>, cdata: &cstore::crate_metadata)
-> typeck::vtable_res<'tcx>;
fn read_vtable_param_res(&mut self,
tcx: &ty::ctxt<'tcx>, cdata: &cstore::crate_metadata)
-> typeck::vtable_param_res<'tcx>;
fn read_vtable_origin(&mut self,
tcx: &ty::ctxt<'tcx>, cdata: &cstore::crate_metadata)
-> typeck::vtable_origin<'tcx>;
}
impl<'tcx, 'a> vtable_decoder_helpers<'tcx> for reader::Decoder<'a> {
fn read_vec_per_param_space<T>(&mut self,
f: |&mut reader::Decoder<'a>| -> T)
-> VecPerParamSpace<T>
{
let types = self.read_to_vec(|this| Ok(f(this))).unwrap();
let selfs = self.read_to_vec(|this| Ok(f(this))).unwrap();
let assocs = self.read_to_vec(|this| Ok(f(this))).unwrap();
let fns = self.read_to_vec(|this| Ok(f(this))).unwrap();
VecPerParamSpace::new(types, selfs, assocs, fns)
}
fn read_vtable_res_with_key(&mut self,
tcx: &ty::ctxt<'tcx>,
cdata: &cstore::crate_metadata)
-> (typeck::ExprAdjustment, typeck::vtable_res<'tcx>) {
self.read_struct("VtableWithKey", 2, |this| {
let adjustment = this.read_struct_field("adjustment", 0, |this| {
Decodable::decode(this)
}).unwrap();
Ok((adjustment, this.read_struct_field("vtable_res", 1, |this| {
Ok(this.read_vtable_res(tcx, cdata))
}).unwrap()))
}).unwrap()
}
fn read_vtable_res(&mut self,
tcx: &ty::ctxt<'tcx>,
cdata: &cstore::crate_metadata)
-> typeck::vtable_res<'tcx>
{
self.read_vec_per_param_space(
|this| this.read_vtable_param_res(tcx, cdata))
}
fn read_vtable_param_res(&mut self,
tcx: &ty::ctxt<'tcx>, cdata: &cstore::crate_metadata)
-> typeck::vtable_param_res<'tcx> {
self.read_to_vec(|this| Ok(this.read_vtable_origin(tcx, cdata)))
.unwrap().into_iter().collect()
}
fn read_vtable_origin(&mut self,
tcx: &ty::ctxt<'tcx>, cdata: &cstore::crate_metadata)
-> typeck::vtable_origin<'tcx> {
self.read_enum("vtable_origin", |this| {
this.read_enum_variant(&["vtable_static",
"vtable_param",
"vtable_error",
"vtable_unboxed_closure"],
|this, i| {
Ok(match i {
0 => {
typeck::vtable_static(
this.read_enum_variant_arg(0u, |this| {
Ok(this.read_def_id_nodcx(cdata))
}).unwrap(),
this.read_enum_variant_arg(1u, |this| {
Ok(this.read_substs_nodcx(tcx, cdata))
}).unwrap(),
this.read_enum_variant_arg(2u, |this| {
Ok(this.read_vtable_res(tcx, cdata))
}).unwrap()
)
}
1 => {
typeck::vtable_param(
this.read_enum_variant_arg(0u, |this| {
Decodable::decode(this)
}).unwrap(),
this.read_enum_variant_arg(1u, |this| {
this.read_uint()
}).unwrap()
)
}
2 => {
typeck::vtable_unboxed_closure(
this.read_enum_variant_arg(0u, |this| {
Ok(this.read_def_id_nodcx(cdata))
}).unwrap()
)
}
3 => {
typeck::vtable_error
}
_ => panic!("bad enum variant")
})
})
}).unwrap()
}
}
// ___________________________________________________________________________
//
fn encode_vec_per_param_space<T>(rbml_w: &mut Encoder,
v: &subst::VecPerParamSpace<T>,
f: |&mut Encoder, &T|) {
for &space in subst::ParamSpace::all().iter() {
rbml_w.emit_from_vec(v.get_slice(space),
|rbml_w, n| Ok(f(rbml_w, n))).unwrap();
}
}
// ______________________________________________________________________
// Encoding and decoding the side tables
trait get_ty_str_ctxt<'tcx> {
fn ty_str_ctxt<'a>(&'a self) -> tyencode::ctxt<'a, 'tcx>;
}
impl<'a, 'tcx> get_ty_str_ctxt<'tcx> for e::EncodeContext<'a, 'tcx> {
fn ty_str_ctxt<'a>(&'a self) -> tyencode::ctxt<'a, 'tcx> {
tyencode::ctxt {
diag: self.tcx.sess.diagnostic(),
ds: e::def_to_string,
tcx: self.tcx,
abbrevs: &self.type_abbrevs
}
}
}
trait rbml_writer_helpers<'tcx> {
fn emit_closure_type<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>,
closure_type: &ty::ClosureTy<'tcx>);
fn emit_method_origin<'a>(&mut self,
ecx: &e::EncodeContext<'a, 'tcx>,
method_origin: &typeck::MethodOrigin<'tcx>);
fn emit_ty<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>, ty: Ty<'tcx>);
fn emit_tys<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>, tys: &[Ty<'tcx>]);
fn emit_type_param_def<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>,
type_param_def: &ty::TypeParameterDef<'tcx>);
fn emit_trait_ref<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>,
ty: &ty::TraitRef<'tcx>);
fn emit_polytype<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>,
pty: ty::Polytype<'tcx>);
fn emit_substs<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>,
substs: &subst::Substs<'tcx>);
fn emit_existential_bounds(&mut self, ecx: &e::EncodeContext, bounds: &ty::ExistentialBounds);
fn emit_builtin_bounds(&mut self, ecx: &e::EncodeContext, bounds: &ty::BuiltinBounds);
fn emit_auto_adjustment<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>,
adj: &ty::AutoAdjustment<'tcx>);
fn emit_autoref<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>,
autoref: &ty::AutoRef<'tcx>);
fn emit_auto_deref_ref<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>,
auto_deref_ref: &ty::AutoDerefRef<'tcx>);
fn emit_unsize_kind<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>,
uk: &ty::UnsizeKind<'tcx>);
}
impl<'a, 'tcx> rbml_writer_helpers<'tcx> for Encoder<'a> {
fn emit_closure_type<'a>(&mut self,
ecx: &e::EncodeContext<'a, 'tcx>,
closure_type: &ty::ClosureTy<'tcx>) {
self.emit_opaque(|this| {
Ok(e::write_closure_type(ecx, this, closure_type))
});
}
fn emit_method_origin<'a>(&mut self,
ecx: &e::EncodeContext<'a, 'tcx>,
method_origin: &typeck::MethodOrigin<'tcx>)
{
use serialize::Encoder;
self.emit_enum("MethodOrigin", |this| {
match *method_origin {
typeck::MethodStatic(def_id) => {
this.emit_enum_variant("MethodStatic", 0, 1, |this| {
Ok(this.emit_def_id(def_id))
})
}
typeck::MethodStaticUnboxedClosure(def_id) => {
this.emit_enum_variant("MethodStaticUnboxedClosure", 1, 1, |this| {
Ok(this.emit_def_id(def_id))
})
}
typeck::MethodTypeParam(ref p) => {
this.emit_enum_variant("MethodTypeParam", 2, 1, |this| {
this.emit_struct("MethodParam", 2, |this| {
try!(this.emit_struct_field("trait_ref", 0, |this| {
Ok(this.emit_trait_ref(ecx, &*p.trait_ref))
}));
try!(this.emit_struct_field("method_num", 0, |this| {
this.emit_uint(p.method_num)
}));
Ok(())
})
})
}
typeck::MethodTraitObject(ref o) => {
this.emit_enum_variant("MethodTraitObject", 3, 1, |this| {
this.emit_struct("MethodObject", 2, |this| {
try!(this.emit_struct_field("trait_ref", 0, |this| {
Ok(this.emit_trait_ref(ecx, &*o.trait_ref))
}));
try!(this.emit_struct_field("object_trait_id", 0, |this| {
Ok(this.emit_def_id(o.object_trait_id))
}));
try!(this.emit_struct_field("method_num", 0, |this| {
this.emit_uint(o.method_num)
}));
try!(this.emit_struct_field("real_index", 0, |this| {
this.emit_uint(o.real_index)
}));
Ok(())
})
})
}
}
});
}
fn emit_ty<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>, ty: Ty<'tcx>) {
self.emit_opaque(|this| Ok(e::write_type(ecx, this, ty)));
}
fn emit_tys<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>, tys: &[Ty<'tcx>]) {
self.emit_from_vec(tys, |this, ty| Ok(this.emit_ty(ecx, *ty)));
}
fn emit_trait_ref<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>,
trait_ref: &ty::TraitRef<'tcx>) {
self.emit_opaque(|this| Ok(e::write_trait_ref(ecx, this, trait_ref)));
}
fn emit_type_param_def<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>,
type_param_def: &ty::TypeParameterDef<'tcx>) {
self.emit_opaque(|this| {
Ok(tyencode::enc_type_param_def(this.writer,
&ecx.ty_str_ctxt(),
type_param_def))
});
}
fn emit_polytype<'a>(&mut self,
ecx: &e::EncodeContext<'a, 'tcx>,
pty: ty::Polytype<'tcx>) {
use serialize::Encoder;
self.emit_struct("Polytype", 2, |this| {
this.emit_struct_field("generics", 0, |this| {
this.emit_struct("Generics", 2, |this| {
this.emit_struct_field("types", 0, |this| {
Ok(encode_vec_per_param_space(
this, &pty.generics.types,
|this, def| this.emit_type_param_def(ecx, def)))
});
this.emit_struct_field("regions", 1, |this| {
Ok(encode_vec_per_param_space(
this, &pty.generics.regions,
|this, def| def.encode(this).unwrap()))
})
})
});
this.emit_struct_field("ty", 1, |this| {
Ok(this.emit_ty(ecx, pty.ty))
})
});
}
fn emit_existential_bounds(&mut self, ecx: &e::EncodeContext, bounds: &ty::ExistentialBounds) {
self.emit_opaque(|this| Ok(tyencode::enc_existential_bounds(this.writer,
&ecx.ty_str_ctxt(),
bounds)));
}
fn emit_builtin_bounds(&mut self, ecx: &e::EncodeContext, bounds: &ty::BuiltinBounds) {
self.emit_opaque(|this| Ok(tyencode::enc_builtin_bounds(this.writer,
&ecx.ty_str_ctxt(),
bounds)));
}
fn emit_substs<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>,
substs: &subst::Substs<'tcx>) {
self.emit_opaque(|this| Ok(tyencode::enc_substs(this.writer,
&ecx.ty_str_ctxt(),
substs)));
}
fn emit_auto_adjustment<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>,
adj: &ty::AutoAdjustment<'tcx>) {
use serialize::Encoder;
self.emit_enum("AutoAdjustment", |this| {
match *adj {
ty::AdjustAddEnv(store) => {
this.emit_enum_variant("AutoAddEnv", 0, 1, |this| {
this.emit_enum_variant_arg(0, |this| store.encode(this))
})
}
ty::AdjustDerefRef(ref auto_deref_ref) => {
this.emit_enum_variant("AutoDerefRef", 1, 1, |this| {
this.emit_enum_variant_arg(0,
|this| Ok(this.emit_auto_deref_ref(ecx, auto_deref_ref)))
})
}
}
});
}
fn emit_autoref<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>,
autoref: &ty::AutoRef<'tcx>) {
use serialize::Encoder;
self.emit_enum("AutoRef", |this| {
match autoref {
&ty::AutoPtr(r, m, None) => {
this.emit_enum_variant("AutoPtr", 0, 3, |this| {
this.emit_enum_variant_arg(0, |this| r.encode(this));
this.emit_enum_variant_arg(1, |this| m.encode(this));
this.emit_enum_variant_arg(2,
|this| this.emit_option(|this| this.emit_option_none()))
})
}
&ty::AutoPtr(r, m, Some(box ref a)) => {
this.emit_enum_variant("AutoPtr", 0, 3, |this| {
this.emit_enum_variant_arg(0, |this| r.encode(this));
this.emit_enum_variant_arg(1, |this| m.encode(this));
this.emit_enum_variant_arg(2, |this| this.emit_option(
|this| this.emit_option_some(|this| Ok(this.emit_autoref(ecx, a)))))
})
}
&ty::AutoUnsize(ref uk) => {
this.emit_enum_variant("AutoUnsize", 1, 1, |this| {
this.emit_enum_variant_arg(0, |this| Ok(this.emit_unsize_kind(ecx, uk)))
})
}
&ty::AutoUnsizeUniq(ref uk) => {
this.emit_enum_variant("AutoUnsizeUniq", 2, 1, |this| {
this.emit_enum_variant_arg(0, |this| Ok(this.emit_unsize_kind(ecx, uk)))
})
}
&ty::AutoUnsafe(m, None) => {
this.emit_enum_variant("AutoUnsafe", 3, 2, |this| {
this.emit_enum_variant_arg(0, |this| m.encode(this));
this.emit_enum_variant_arg(1,
|this| this.emit_option(|this| this.emit_option_none()))
})
}
&ty::AutoUnsafe(m, Some(box ref a)) => {
this.emit_enum_variant("AutoUnsafe", 3, 2, |this| {
this.emit_enum_variant_arg(0, |this| m.encode(this));
this.emit_enum_variant_arg(1, |this| this.emit_option(
|this| this.emit_option_some(|this| Ok(this.emit_autoref(ecx, a)))))
})
}
}
});
}
fn emit_auto_deref_ref<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>,
auto_deref_ref: &ty::AutoDerefRef<'tcx>) {
use serialize::Encoder;
self.emit_struct("AutoDerefRef", 2, |this| {
this.emit_struct_field("autoderefs", 0, |this| auto_deref_ref.autoderefs.encode(this));
this.emit_struct_field("autoref", 1, |this| {
this.emit_option(|this| {
match auto_deref_ref.autoref {
None => this.emit_option_none(),
Some(ref a) => this.emit_option_some(|this| Ok(this.emit_autoref(ecx, a))),
}
})
})
});
}
fn emit_unsize_kind<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>,
uk: &ty::UnsizeKind<'tcx>) {
use serialize::Encoder;
self.emit_enum("UnsizeKind", |this| {
match *uk {
ty::UnsizeLength(len) => {
this.emit_enum_variant("UnsizeLength", 0, 1, |this| {
this.emit_enum_variant_arg(0, |this| len.encode(this))
})
}
ty::UnsizeStruct(box ref uk, idx) => {
this.emit_enum_variant("UnsizeStruct", 1, 2, |this| {
this.emit_enum_variant_arg(0, |this| Ok(this.emit_unsize_kind(ecx, uk)));
this.emit_enum_variant_arg(1, |this| idx.encode(this))
})
}
ty::UnsizeVtable(ty::TyTrait { ref principal,
bounds: ref b },
self_ty) => {
this.emit_enum_variant("UnsizeVtable", 2, 4, |this| {
this.emit_enum_variant_arg(0, |this| {
try!(this.emit_struct_field("principal", 0, |this| {
Ok(this.emit_trait_ref(ecx, &*principal))
}));
this.emit_struct_field("bounds", 1, |this| {
Ok(this.emit_existential_bounds(ecx, b))
})
});
this.emit_enum_variant_arg(1, |this| Ok(this.emit_ty(ecx, self_ty)))
})
}
}
});
}
}
trait write_tag_and_id {
fn tag(&mut self, tag_id: c::astencode_tag, f: |&mut Self|);
fn id(&mut self, id: ast::NodeId);
}
impl<'a> write_tag_and_id for Encoder<'a> {
fn tag(&mut self,
tag_id: c::astencode_tag,
f: |&mut Encoder<'a>|) {
self.start_tag(tag_id as uint);
f(self);
self.end_tag();
}
fn id(&mut self, id: ast::NodeId) {
self.wr_tagged_u64(c::tag_table_id as uint, id as u64);
}
}
struct SideTableEncodingIdVisitor<'a, 'b:'a, 'c:'a, 'tcx:'c> {
ecx: &'a e::EncodeContext<'c, 'tcx>,
rbml_w: &'a mut Encoder<'b>,
}
impl<'a, 'b, 'c, 'tcx> ast_util::IdVisitingOperation for
SideTableEncodingIdVisitor<'a, 'b, 'c, 'tcx> {
fn visit_id(&mut self, id: ast::NodeId) {
encode_side_tables_for_id(self.ecx, self.rbml_w, id)
}
}
fn encode_side_tables_for_ii(ecx: &e::EncodeContext,
rbml_w: &mut Encoder,
ii: &ast::InlinedItem) {
rbml_w.start_tag(c::tag_table as uint);
ast_util::visit_ids_for_inlined_item(ii, &mut SideTableEncodingIdVisitor {
ecx: ecx,
rbml_w: rbml_w
});
rbml_w.end_tag();
}
fn encode_side_tables_for_id(ecx: &e::EncodeContext,
rbml_w: &mut Encoder,
id: ast::NodeId) {
let tcx = ecx.tcx;
debug!("Encoding side tables for id {}", id);
for def in tcx.def_map.borrow().get(&id).iter() {
rbml_w.tag(c::tag_table_def, |rbml_w| {
rbml_w.id(id);
rbml_w.tag(c::tag_table_val, |rbml_w| (*def).encode(rbml_w).unwrap());
})
}
for &ty in tcx.node_types.borrow().get(&id).iter() {
rbml_w.tag(c::tag_table_node_type, |rbml_w| {
rbml_w.id(id);
rbml_w.tag(c::tag_table_val, |rbml_w| {
rbml_w.emit_ty(ecx, *ty);
})
})
}
for &item_substs in tcx.item_substs.borrow().get(&id).iter() {
rbml_w.tag(c::tag_table_item_subst, |rbml_w| {
rbml_w.id(id);
rbml_w.tag(c::tag_table_val, |rbml_w| {
rbml_w.emit_substs(ecx, &item_substs.substs);
})
})
}
for &fv in tcx.freevars.borrow().get(&id).iter() {
rbml_w.tag(c::tag_table_freevars, |rbml_w| {
rbml_w.id(id);
rbml_w.tag(c::tag_table_val, |rbml_w| {
rbml_w.emit_from_vec(fv.as_slice(), |rbml_w, fv_entry| {
Ok(encode_freevar_entry(rbml_w, fv_entry))
});
})
});
for freevar in fv.iter() {
match tcx.capture_mode(id) {
ast::CaptureByRef => {
rbml_w.tag(c::tag_table_upvar_borrow_map, |rbml_w| {
rbml_w.id(id);
rbml_w.tag(c::tag_table_val, |rbml_w| {
let var_id = freevar.def.def_id().node;
let upvar_id = ty::UpvarId {
var_id: var_id,
closure_expr_id: id
};
let upvar_borrow = tcx.upvar_borrow_map.borrow()[upvar_id].clone();
var_id.encode(rbml_w);
upvar_borrow.encode(rbml_w);
})
})
}
_ => {}
}
}
}
for &cm in tcx.capture_modes.borrow().get(&id).iter() {
rbml_w.tag(c::tag_table_capture_modes, |rbml_w| {
rbml_w.id(id);
rbml_w.tag(c::tag_table_val, |rbml_w| {
encode_capture_mode(rbml_w, *cm);
})
})
}
let lid = ast::DefId { krate: ast::LOCAL_CRATE, node: id };
for &pty in tcx.tcache.borrow().get(&lid).iter() {
rbml_w.tag(c::tag_table_tcache, |rbml_w| {
rbml_w.id(id);
rbml_w.tag(c::tag_table_val, |rbml_w| {
rbml_w.emit_polytype(ecx, pty.clone());
})
})
}
for &type_param_def in tcx.ty_param_defs.borrow().get(&id).iter() {
rbml_w.tag(c::tag_table_param_defs, |rbml_w| {
rbml_w.id(id);
rbml_w.tag(c::tag_table_val, |rbml_w| {
rbml_w.emit_type_param_def(ecx, type_param_def)
})
})
}
let method_call = MethodCall::expr(id);
for &method in tcx.method_map.borrow().get(&method_call).iter() {
rbml_w.tag(c::tag_table_method_map, |rbml_w| {
rbml_w.id(id);
rbml_w.tag(c::tag_table_val, |rbml_w| {
encode_method_callee(ecx, rbml_w, method_call.adjustment, method)
})
})
}
for &trait_ref in tcx.object_cast_map.borrow().get(&id).iter() {
rbml_w.tag(c::tag_table_object_cast_map, |rbml_w| {
rbml_w.id(id);
rbml_w.tag(c::tag_table_val, |rbml_w| {
rbml_w.emit_trait_ref(ecx, &**trait_ref);
})
})
}
for &adjustment in tcx.adjustments.borrow().get(&id).iter() {
match *adjustment {
_ if ty::adjust_is_object(adjustment) => {
let method_call = MethodCall::autoobject(id);
for &method in tcx.method_map.borrow().get(&method_call).iter() {
rbml_w.tag(c::tag_table_method_map, |rbml_w| {
rbml_w.id(id);
rbml_w.tag(c::tag_table_val, |rbml_w| {
encode_method_callee(ecx, rbml_w, method_call.adjustment, method)
})
})
}
}
ty::AdjustDerefRef(ref adj) => {
assert!(!ty::adjust_is_object(adjustment));
for autoderef in range(0, adj.autoderefs) {
let method_call = MethodCall::autoderef(id, autoderef);
for &method in tcx.method_map.borrow().get(&method_call).iter() {
rbml_w.tag(c::tag_table_method_map, |rbml_w| {
rbml_w.id(id);
rbml_w.tag(c::tag_table_val, |rbml_w| {
encode_method_callee(ecx, rbml_w,
method_call.adjustment, method)
})
})
}
}
}
_ => {
assert!(!ty::adjust_is_object(adjustment));
}
}
rbml_w.tag(c::tag_table_adjustments, |rbml_w| {
rbml_w.id(id);
rbml_w.tag(c::tag_table_val, |rbml_w| {
rbml_w.emit_auto_adjustment(ecx, adjustment);
})
})
}
for unboxed_closure in tcx.unboxed_closures
.borrow()
.get(&ast_util::local_def(id))
.iter() {
rbml_w.tag(c::tag_table_unboxed_closures, |rbml_w| {
rbml_w.id(id);
rbml_w.tag(c::tag_table_val, |rbml_w| {
rbml_w.emit_closure_type(ecx, &unboxed_closure.closure_type);
encode_unboxed_closure_kind(rbml_w, unboxed_closure.kind)
})
})
}
}
trait doc_decoder_helpers {
fn as_int(&self) -> int;
fn opt_child(&self, tag: c::astencode_tag) -> Option<Self>;
}
impl<'a> doc_decoder_helpers for rbml::Doc<'a> {
fn as_int(&self) -> int { reader::doc_as_u64(*self) as int }
fn opt_child(&self, tag: c::astencode_tag) -> Option<rbml::Doc<'a>> {
reader::maybe_get_doc(*self, tag as uint)
}
}
trait rbml_decoder_decoder_helpers<'tcx> {
fn read_method_origin<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
-> typeck::MethodOrigin<'tcx>;
fn read_ty<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>) -> Ty<'tcx>;
fn read_tys<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>) -> Vec<Ty<'tcx>>;
fn read_trait_ref<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
-> Rc<ty::TraitRef<'tcx>>;
fn read_type_param_def<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
-> ty::TypeParameterDef<'tcx>;
fn read_polytype<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
-> ty::Polytype<'tcx>;
fn read_existential_bounds<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
-> ty::ExistentialBounds;
fn read_substs<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
-> subst::Substs<'tcx>;
fn read_auto_adjustment<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
-> ty::AutoAdjustment<'tcx>;
fn read_unboxed_closure<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
-> ty::UnboxedClosure<'tcx>;
fn read_auto_deref_ref<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
-> ty::AutoDerefRef<'tcx>;
fn read_autoref<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
-> ty::AutoRef<'tcx>;
fn read_unsize_kind<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
-> ty::UnsizeKind<'tcx>;
fn convert_def_id(&mut self,
dcx: &DecodeContext,
source: DefIdSource,
did: ast::DefId)
-> ast::DefId;
// Versions of the type reading functions that don't need the full
// DecodeContext.
fn read_ty_nodcx(&mut self,
tcx: &ty::ctxt<'tcx>, cdata: &cstore::crate_metadata) -> Ty<'tcx>;
fn read_tys_nodcx(&mut self,
tcx: &ty::ctxt<'tcx>,
cdata: &cstore::crate_metadata) -> Vec<Ty<'tcx>>;
fn read_substs_nodcx(&mut self, tcx: &ty::ctxt<'tcx>,
cdata: &cstore::crate_metadata)
-> subst::Substs<'tcx>;
}
impl<'a, 'tcx> rbml_decoder_decoder_helpers<'tcx> for reader::Decoder<'a> {
fn read_ty_nodcx(&mut self,
tcx: &ty::ctxt<'tcx>, cdata: &cstore::crate_metadata) -> Ty<'tcx> {
self.read_opaque(|_, doc| {
Ok(tydecode::parse_ty_data(
doc.data,
cdata.cnum,
doc.start,
tcx,
|_, id| decoder::translate_def_id(cdata, id)))
}).unwrap()
}
fn read_tys_nodcx(&mut self,
tcx: &ty::ctxt<'tcx>,
cdata: &cstore::crate_metadata) -> Vec<Ty<'tcx>> {
self.read_to_vec(|this| Ok(this.read_ty_nodcx(tcx, cdata)) )
.unwrap()
.into_iter()
.collect()
}
fn read_substs_nodcx(&mut self,
tcx: &ty::ctxt<'tcx>,
cdata: &cstore::crate_metadata)
-> subst::Substs<'tcx>
{
self.read_opaque(|_, doc| {
Ok(tydecode::parse_substs_data(
doc.data,
cdata.cnum,
doc.start,
tcx,
|_, id| decoder::translate_def_id(cdata, id)))
}).unwrap()
}
fn read_method_origin<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
-> typeck::MethodOrigin<'tcx>
{
self.read_enum("MethodOrigin", |this| {
let variants = &["MethodStatic", "MethodStaticUnboxedClosure",
"MethodTypeParam", "MethodTraitObject"];
this.read_enum_variant(variants, |this, i| {
Ok(match i {
0 => {
let def_id = this.read_def_id(dcx);
typeck::MethodStatic(def_id)
}
1 => {
let def_id = this.read_def_id(dcx);
typeck::MethodStaticUnboxedClosure(def_id)
}
2 => {
this.read_struct("MethodTypeParam", 2, |this| {
Ok(typeck::MethodTypeParam(
typeck::MethodParam {
trait_ref: {
this.read_struct_field("trait_ref", 0, |this| {
Ok(this.read_trait_ref(dcx))
}).unwrap()
},
method_num: {
this.read_struct_field("method_num", 1, |this| {
this.read_uint()
}).unwrap()
}
}))
}).unwrap()
}
3 => {
this.read_struct("MethodTraitObject", 2, |this| {
Ok(typeck::MethodTraitObject(
typeck::MethodObject {
trait_ref: {
this.read_struct_field("trait_ref", 0, |this| {
Ok(this.read_trait_ref(dcx))
}).unwrap()
},
object_trait_id: {
this.read_struct_field("object_trait_id", 1, |this| {
Ok(this.read_def_id(dcx))
}).unwrap()
},
method_num: {
this.read_struct_field("method_num", 2, |this| {
this.read_uint()
}).unwrap()
},
real_index: {
this.read_struct_field("real_index", 3, |this| {
this.read_uint()
}).unwrap()
},
}))
}).unwrap()
}
_ => panic!("..")
})
})
}).unwrap()
}
fn read_ty<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>) -> Ty<'tcx> {
// Note: regions types embed local node ids. In principle, we
// should translate these node ids into the new decode
// context. However, we do not bother, because region types
// are not used during trans.
return self.read_opaque(|this, doc| {
debug!("read_ty({})", type_string(doc));
let ty = tydecode::parse_ty_data(
doc.data,
dcx.cdata.cnum,
doc.start,
dcx.tcx,
|s, a| this.convert_def_id(dcx, s, a));
Ok(ty)
}).unwrap();
fn type_string(doc: rbml::Doc) -> String {
let mut str = String::new();
for i in range(doc.start, doc.end) {
str.push(doc.data[i] as char);
}
str
}
}
fn read_tys<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
-> Vec<Ty<'tcx>> {
self.read_to_vec(|this| Ok(this.read_ty(dcx))).unwrap().into_iter().collect()
}
fn read_trait_ref<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
-> Rc<ty::TraitRef<'tcx>> {
Rc::new(self.read_opaque(|this, doc| {
let ty = tydecode::parse_trait_ref_data(
doc.data,
dcx.cdata.cnum,
doc.start,
dcx.tcx,
|s, a| this.convert_def_id(dcx, s, a));
Ok(ty)
}).unwrap())
}
fn read_type_param_def<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
-> ty::TypeParameterDef<'tcx> {
self.read_opaque(|this, doc| {
Ok(tydecode::parse_type_param_def_data(
doc.data,
doc.start,
dcx.cdata.cnum,
dcx.tcx,
|s, a| this.convert_def_id(dcx, s, a)))
}).unwrap()
}
fn read_polytype<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
-> ty::Polytype<'tcx> {
self.read_struct("Polytype", 2, |this| {
Ok(ty::Polytype {
generics: this.read_struct_field("generics", 0, |this| {
this.read_struct("Generics", 2, |this| {
Ok(ty::Generics {
types:
this.read_struct_field("types", 0, |this| {
Ok(this.read_vec_per_param_space(
|this| this.read_type_param_def(dcx)))
}).unwrap(),
regions:
this.read_struct_field("regions", 1, |this| {
Ok(this.read_vec_per_param_space(
|this| Decodable::decode(this).unwrap()))
}).unwrap()
})
})
}).unwrap(),
ty: this.read_struct_field("ty", 1, |this| {
Ok(this.read_ty(dcx))
}).unwrap()
})
}).unwrap()
}
fn read_existential_bounds<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
-> ty::ExistentialBounds
{
self.read_opaque(|this, doc| {
Ok(tydecode::parse_existential_bounds_data(doc.data,
dcx.cdata.cnum,
doc.start,
dcx.tcx,
|s, a| this.convert_def_id(dcx, s, a)))
}).unwrap()
}
fn read_substs<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
-> subst::Substs<'tcx> {
self.read_opaque(|this, doc| {
Ok(tydecode::parse_substs_data(doc.data,
dcx.cdata.cnum,
doc.start,
dcx.tcx,
|s, a| this.convert_def_id(dcx, s, a)))
}).unwrap()
}
fn read_auto_adjustment<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
-> ty::AutoAdjustment<'tcx> {
self.read_enum("AutoAdjustment", |this| {
let variants = ["AutoAddEnv", "AutoDerefRef"];
this.read_enum_variant(&variants, |this, i| {
Ok(match i {
0 => {
let store: ty::TraitStore =
this.read_enum_variant_arg(0, |this| Decodable::decode(this)).unwrap();
ty::AdjustAddEnv(store.tr(dcx))
}
1 => {
let auto_deref_ref: ty::AutoDerefRef =
this.read_enum_variant_arg(0,
|this| Ok(this.read_auto_deref_ref(dcx))).unwrap();
ty::AdjustDerefRef(auto_deref_ref)
}
_ => panic!("bad enum variant for ty::AutoAdjustment")
})
})
}).unwrap()
}
fn read_auto_deref_ref<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
-> ty::AutoDerefRef<'tcx> {
self.read_struct("AutoDerefRef", 2, |this| {
Ok(ty::AutoDerefRef {
autoderefs: this.read_struct_field("autoderefs", 0, |this| {
Decodable::decode(this)
}).unwrap(),
autoref: this.read_struct_field("autoref", 1, |this| {
this.read_option(|this, b| {
if b {
Ok(Some(this.read_autoref(dcx)))
} else {
Ok(None)
}
})
}).unwrap(),
})
}).unwrap()
}
fn read_autoref<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>) -> ty::AutoRef<'tcx> {
self.read_enum("AutoRef", |this| {
let variants = ["AutoPtr",
"AutoUnsize",
"AutoUnsizeUniq",
"AutoUnsafe"];
this.read_enum_variant(&variants, |this, i| {
Ok(match i {
0 => {
let r: ty::Region =
this.read_enum_variant_arg(0, |this| Decodable::decode(this)).unwrap();
let m: ast::Mutability =
this.read_enum_variant_arg(1, |this| Decodable::decode(this)).unwrap();
let a: Option<Box<ty::AutoRef>> =
this.read_enum_variant_arg(2, |this| this.read_option(|this, b| {
if b {
Ok(Some(box this.read_autoref(dcx)))
} else {
Ok(None)
}
})).unwrap();
ty::AutoPtr(r.tr(dcx), m, a)
}
1 => {
let uk: ty::UnsizeKind =
this.read_enum_variant_arg(0,
|this| Ok(this.read_unsize_kind(dcx))).unwrap();
ty::AutoUnsize(uk)
}
2 => {
let uk: ty::UnsizeKind =
this.read_enum_variant_arg(0,
|this| Ok(this.read_unsize_kind(dcx))).unwrap();
ty::AutoUnsizeUniq(uk)
}
3 => {
let m: ast::Mutability =
this.read_enum_variant_arg(0, |this| Decodable::decode(this)).unwrap();
let a: Option<Box<ty::AutoRef>> =
this.read_enum_variant_arg(1, |this| this.read_option(|this, b| {
if b {
Ok(Some(box this.read_autoref(dcx)))
} else {
Ok(None)
}
})).unwrap();
ty::AutoUnsafe(m, a)
}
_ => panic!("bad enum variant for ty::AutoRef")
})
})
}).unwrap()
}
fn read_unsize_kind<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
-> ty::UnsizeKind<'tcx> {
self.read_enum("UnsizeKind", |this| {
let variants = &["UnsizeLength", "UnsizeStruct", "UnsizeVtable"];
this.read_enum_variant(variants, |this, i| {
Ok(match i {
0 => {
let len: uint =
this.read_enum_variant_arg(0, |this| Decodable::decode(this)).unwrap();
ty::UnsizeLength(len)
}
1 => {
let uk: ty::UnsizeKind =
this.read_enum_variant_arg(0,
|this| Ok(this.read_unsize_kind(dcx))).unwrap();
let idx: uint =
this.read_enum_variant_arg(1, |this| Decodable::decode(this)).unwrap();
ty::UnsizeStruct(box uk, idx)
}
2 => {
let ty_trait = try!(this.read_enum_variant_arg(0, |this| {
let principal = try!(this.read_struct_field("principal", 0, |this| {
Ok(this.read_trait_ref(dcx))
}));
Ok(ty::TyTrait {
principal: (*principal).clone(),
bounds: try!(this.read_struct_field("bounds", 1, |this| {
Ok(this.read_existential_bounds(dcx))
})),
})
}));
let self_ty =
this.read_enum_variant_arg(1, |this| Ok(this.read_ty(dcx))).unwrap();
ty::UnsizeVtable(ty_trait, self_ty)
}
_ => panic!("bad enum variant for ty::UnsizeKind")
})
})
}).unwrap()
}
fn read_unboxed_closure<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
-> ty::UnboxedClosure<'tcx> {
let closure_type = self.read_opaque(|this, doc| {
Ok(tydecode::parse_ty_closure_data(
doc.data,
dcx.cdata.cnum,
doc.start,
dcx.tcx,
|s, a| this.convert_def_id(dcx, s, a)))
}).unwrap();
let variants = &[
"FnUnboxedClosureKind",
"FnMutUnboxedClosureKind",
"FnOnceUnboxedClosureKind"
];
let kind = self.read_enum("UnboxedClosureKind", |this| {
this.read_enum_variant(variants, |_, i| {
Ok(match i {
0 => ty::FnUnboxedClosureKind,
1 => ty::FnMutUnboxedClosureKind,
2 => ty::FnOnceUnboxedClosureKind,
_ => panic!("bad enum variant for ty::UnboxedClosureKind"),
})
})
}).unwrap();
ty::UnboxedClosure {
closure_type: closure_type,
kind: kind,
}
}
fn convert_def_id(&mut self,
dcx: &DecodeContext,
source: tydecode::DefIdSource,
did: ast::DefId)
-> ast::DefId {
/*!
* Converts a def-id that appears in a type. The correct
* translation will depend on what kind of def-id this is.
* This is a subtle point: type definitions are not
* inlined into the current crate, so if the def-id names
* a nominal type or type alias, then it should be
* translated to refer to the source crate.
*
* However, *type parameters* are cloned along with the function
* they are attached to. So we should translate those def-ids
* to refer to the new, cloned copy of the type parameter.
* We only see references to free type parameters in the body of
* an inlined function. In such cases, we need the def-id to
* be a local id so that the TypeContents code is able to lookup
* the relevant info in the ty_param_defs table.
*
* *Region parameters*, unfortunately, are another kettle of fish.
* In such cases, def_id's can appear in types to distinguish
* shadowed bound regions and so forth. It doesn't actually
* matter so much what we do to these, since regions are erased
* at trans time, but it's good to keep them consistent just in
* case. We translate them with `tr_def_id()` which will map
* the crate numbers back to the original source crate.
*
* Unboxed closures are cloned along with the function being
* inlined, and all side tables use interned node IDs, so we
* translate their def IDs accordingly.
*
* It'd be really nice to refactor the type repr to not include
* def-ids so that all these distinctions were unnecessary.
*/
let r = match source {
NominalType | TypeWithId | RegionParameter => dcx.tr_def_id(did),
TypeParameter | UnboxedClosureSource => dcx.tr_intern_def_id(did)
};
debug!("convert_def_id(source={}, did={})={}", source, did, r);
return r;
}
}
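// Rough example of the source-based dispatch in convert_def_id (the ids are
// made up): a NominalType reference such as DefId { krate: 4, node: 88 } keeps
// its node id and only has the crate number remapped via tr_def_id, whereas a
// TypeParameter or UnboxedClosureSource def-id is rewritten to a freshly
// reserved local node id via tr_intern_def_id so the local side tables match.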
fn decode_side_tables(dcx: &DecodeContext,
ast_doc: rbml::Doc) {
let tbl_doc = ast_doc.get(c::tag_table as uint);
reader::docs(tbl_doc, |tag, entry_doc| {
let id0 = entry_doc.get(c::tag_table_id as uint).as_int();
let id = dcx.tr_id(id0 as ast::NodeId);
debug!(">> Side table document with tag 0x{:x} \
found for id {} (orig {})",
tag, id, id0);
match c::astencode_tag::from_uint(tag) {
None => {
dcx.tcx.sess.bug(
format!("unknown tag found in side tables: {:x}",
tag).as_slice());
}
Some(value) => {
let val_doc = entry_doc.get(c::tag_table_val as uint);
let mut val_dsr = reader::Decoder::new(val_doc);
let val_dsr = &mut val_dsr;
match value {
c::tag_table_def => {
let def = decode_def(dcx, val_doc);
dcx.tcx.def_map.borrow_mut().insert(id, def);
}
c::tag_table_node_type => {
let ty = val_dsr.read_ty(dcx);
debug!("inserting ty for node {}: {}",
id, ty_to_string(dcx.tcx, ty));
dcx.tcx.node_types.borrow_mut().insert(id, ty);
}
c::tag_table_item_subst => {
let item_substs = ty::ItemSubsts {
substs: val_dsr.read_substs(dcx)
};
dcx.tcx.item_substs.borrow_mut().insert(
id, item_substs);
}
c::tag_table_freevars => {
let fv_info = val_dsr.read_to_vec(|val_dsr| {
Ok(val_dsr.read_freevar_entry(dcx))
}).unwrap().into_iter().collect();
dcx.tcx.freevars.borrow_mut().insert(id, fv_info);
}
c::tag_table_upvar_borrow_map => {
let var_id: ast::NodeId = Decodable::decode(val_dsr).unwrap();
let upvar_id = ty::UpvarId {
var_id: dcx.tr_id(var_id),
closure_expr_id: id
};
let ub: ty::UpvarBorrow = Decodable::decode(val_dsr).unwrap();
dcx.tcx.upvar_borrow_map.borrow_mut().insert(upvar_id, ub.tr(dcx));
}
c::tag_table_capture_modes => {
let capture_mode = val_dsr.read_capture_mode();
dcx.tcx
.capture_modes
.borrow_mut()
.insert(id, capture_mode);
}
c::tag_table_tcache => {
let pty = val_dsr.read_polytype(dcx);
let lid = ast::DefId { krate: ast::LOCAL_CRATE, node: id };
dcx.tcx.tcache.borrow_mut().insert(lid, pty);
}
c::tag_table_param_defs => {
let bounds = val_dsr.read_type_param_def(dcx);
dcx.tcx.ty_param_defs.borrow_mut().insert(id, bounds);
}
c::tag_table_method_map => {
let (adjustment, method) = val_dsr.read_method_callee(dcx);
let method_call = MethodCall {
expr_id: id,
adjustment: adjustment
};
dcx.tcx.method_map.borrow_mut().insert(method_call, method);
}
c::tag_table_object_cast_map => {
let trait_ref = val_dsr.read_trait_ref(dcx);
dcx.tcx.object_cast_map.borrow_mut()
.insert(id, trait_ref);
}
c::tag_table_adjustments => {
let adj: ty::AutoAdjustment = val_dsr.read_auto_adjustment(dcx);
dcx.tcx.adjustments.borrow_mut().insert(id, adj);
}
c::tag_table_unboxed_closures => {
let unboxed_closure =
val_dsr.read_unboxed_closure(dcx);
dcx.tcx
.unboxed_closures
.borrow_mut()
.insert(ast_util::local_def(id),
unboxed_closure);
}
_ => {
dcx.tcx.sess.bug(
format!("unknown tag found in side tables: {:x}",
tag).as_slice());
}
}
}
}
debug!(">< Side table doc loaded");
true
});
}
// ______________________________________________________________________
// Testing of astencode_gen
#[cfg(test)]
fn encode_item_ast(rbml_w: &mut Encoder, item: &ast::Item) {
rbml_w.start_tag(c::tag_tree as uint);
(*item).encode(rbml_w);
rbml_w.end_tag();
}
#[cfg(test)]
fn decode_item_ast(par_doc: rbml::Doc) -> ast::Item {
let chi_doc = par_doc.get(c::tag_tree as uint);
let mut d = reader::Decoder::new(chi_doc);
Decodable::decode(&mut d).unwrap()
}
#[cfg(test)]
trait fake_ext_ctxt {
fn cfg(&self) -> ast::CrateConfig;
fn parse_sess<'a>(&'a self) -> &'a parse::ParseSess;
fn call_site(&self) -> Span;
fn ident_of(&self, st: &str) -> ast::Ident;
}
#[cfg(test)]
impl fake_ext_ctxt for parse::ParseSess {
fn cfg(&self) -> ast::CrateConfig {
Vec::new()
}
fn parse_sess<'a>(&'a self) -> &'a parse::ParseSess { self }
fn call_site(&self) -> Span {
codemap::Span {
lo: codemap::BytePos(0),
hi: codemap::BytePos(0),
expn_id: codemap::NO_EXPANSION
}
}
fn ident_of(&self, st: &str) -> ast::Ident {
token::str_to_ident(st)
}
}
#[cfg(test)]
fn mk_ctxt() -> parse::ParseSess {
parse::new_parse_sess()
}
#[cfg(test)]
fn roundtrip(in_item: Option<P<ast::Item>>) {
let in_item = in_item.unwrap();
let mut wr = SeekableMemWriter::new();
encode_item_ast(&mut writer::Encoder::new(&mut wr), &*in_item);
let rbml_doc = rbml::Doc::new(wr.get_ref());
let out_item = decode_item_ast(rbml_doc);
assert!(*in_item == out_item);
}
#[test]
fn test_basic() {
let cx = mk_ctxt();
roundtrip(quote_item!(&cx,
fn foo() {}
));
}
/* NOTE: When there's a snapshot, update this (yay quasiquoter!)
#[test]
fn test_smalltalk() {
let cx = mk_ctxt();
roundtrip(quote_item!(&cx,
fn foo() -> int { 3 + 4 } // first smalltalk program ever executed.
));
}
*/
#[test]
fn test_more() {
let cx = mk_ctxt();
roundtrip(quote_item!(&cx,
fn foo(x: uint, y: uint) -> uint {
let z = x + y;
return z;
}
));
}
#[test]
fn test_simplification() {
let cx = mk_ctxt();
let item = quote_item!(&cx,
fn new_int_alist<B>() -> alist<int, B> {
fn eq_int(a: int, b: int) -> bool { a == b }
return alist {eq_fn: eq_int, data: Vec::new()};
}
).unwrap();
let item_in = e::IIItemRef(&*item);
let item_out = simplify_ast(item_in);
let item_exp = ast::IIItem(quote_item!(&cx,
fn new_int_alist<B>() -> alist<int, B> {
return alist {eq_fn: eq_int, data: Vec::new()};
}
).unwrap());
match (item_out, item_exp) {
(ast::IIItem(item_out), ast::IIItem(item_exp)) => {
assert!(pprust::item_to_string(&*item_out) ==
pprust::item_to_string(&*item_exp));
}
_ => panic!()
}
} | |
category.server.model.js | 'use strict';
/**
* Module dependencies.
*/
var mongoose = require('mongoose'),
Schema = mongoose.Schema;
/**
* Validation
*/
function validateLength (v) {
// a custom validation function for checking string length to be used by the model
return v.length <= 30;
} | * Category Schema
*/
var CategorySchema = new Schema({
description: {
type: String,
default: '',
// types have specific functions e.g. trim, lowercase, uppercase (http://mongoosejs.com/docs/api.html#schema-string-js)
trim: true
},
name: {
type: String,
default: '',
trim: true,
unique : true,
// make this a required field
required: 'name cannot be blank',
// wires in a custom validator function (http://mongoosejs.com/docs/api.html#schematype_SchemaType-validate).
validate: [validateLength, 'name must be 30 chars in length or less']
},
category: {
type: String,
default: '',
trim: true
}
}
);
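// Illustrative sketch (not part of the original file): the custom validator above
// rejects names longer than 30 characters, so a hypothetical save such as
//   new Category({ name: new Array(32).join('x') }).save(function (err) {
//     // err.errors.name.message === 'name must be 30 chars in length or less'
//   });
// fails validation (Category stands for the model registered below).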
// Expose the model to other objects (similar to a 'public' setter).
mongoose.model('Videos', CategorySchema); |
/** |
KPIBoxStyles.js | import variables from '../../../styles/variables';
const styles = theme => ({
KpiBox: {
display: "flex",
flexDirection: "row",
height: 80,
alignItems: 'center',
cursor: 'pointer'
},
KPIItemSmall: {
flex: 1,
fontFamily: 'montserrat',
textAlign: 'right'
},
KPIItem: {
display: 'flex',
flexDirection: 'column',
overflow: 'hidden',
flex: 2
},
heading: {
}, | color: variables.lighterGray
},
fontLarge: {
fontSize: 37
},
fontMedium: {
fontSize: 22
},
fontSmall: {
fontSize: 14,
paddingTop: 5
},
item: {
display: 'table-cell'
}
});
export default styles; | fontLightGray: { |
loopingcall.py | # Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011 Justin Santa Barbara
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sys
from eventlet import event
from eventlet import greenthread
from heat.openstack.common.gettextutils import _
from heat.openstack.common import log as logging
from heat.openstack.common import timeutils
LOG = logging.getLogger(__name__)
class LoopingCallDone(Exception):
"""Exception to break out and stop a LoopingCall.
The poll-function passed to LoopingCall can raise this exception to
break out of the loop normally. This is somewhat analogous to
StopIteration.
An optional return-value can be included as the argument to the exception;
this return-value will be returned by LoopingCall.wait()
"""
def __init__(self, retvalue=True):
|
class LoopingCallBase(object):
def __init__(self, f=None, *args, **kw):
self.args = args
self.kw = kw
self.f = f
self._running = False
self.done = None
def stop(self):
self._running = False
def wait(self):
return self.done.wait()
class FixedIntervalLoopingCall(LoopingCallBase):
"""A fixed interval looping call."""
def start(self, interval, initial_delay=None):
self._running = True
done = event.Event()
def _inner():
if initial_delay:
greenthread.sleep(initial_delay)
try:
while self._running:
start = timeutils.utcnow()
self.f(*self.args, **self.kw)
end = timeutils.utcnow()
if not self._running:
break
delay = interval - timeutils.delta_seconds(start, end)
if delay <= 0:
LOG.warn(_('task run outlasted interval by %s sec') %
-delay)
greenthread.sleep(delay if delay > 0 else 0)
except LoopingCallDone as e:
self.stop()
done.send(e.retvalue)
except Exception:
LOG.exception(_('in fixed duration looping call'))
done.send_exception(*sys.exc_info())
return
else:
done.send(True)
self.done = done
greenthread.spawn_n(_inner)
return self.done
# TODO(mikal): this class name is deprecated in Havana and should be removed
# in the I release
LoopingCall = FixedIntervalLoopingCall
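# Illustrative usage sketch (not part of the original module), assuming an
# eventlet environment: a hypothetical poll function stops the loop by raising
# LoopingCallDone, and wait() then returns the value passed to the exception.
#
#     state = {'count': 0}
#
#     def _poll():
#         state['count'] += 1
#         if state['count'] >= 10:
#             raise LoopingCallDone(retvalue=state['count'])
#
#     timer = FixedIntervalLoopingCall(_poll)
#     timer.start(interval=1.0)
#     result = timer.wait()  # -> 10 once _poll raises LoopingCallDone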
class DynamicLoopingCall(LoopingCallBase):
"""A looping call which sleeps until the next known event.
The function called should return how long to sleep for before being
called again.
"""
def start(self, initial_delay=None, periodic_interval_max=None):
self._running = True
done = event.Event()
def _inner():
if initial_delay:
greenthread.sleep(initial_delay)
try:
while self._running:
idle = self.f(*self.args, **self.kw)
if not self._running:
break
if periodic_interval_max is not None:
idle = min(idle, periodic_interval_max)
LOG.debug(_('Dynamic looping call sleeping for %.02f '
'seconds'), idle)
greenthread.sleep(idle)
except LoopingCallDone as e:
self.stop()
done.send(e.retvalue)
except Exception:
LOG.exception(_('in dynamic looping call'))
done.send_exception(*sys.exc_info())
return
else:
done.send(True)
self.done = done
greenthread.spawn(_inner)
return self.done
| """:param retvalue: Value that LoopingCall.wait() should return."""
self.retvalue = retvalue |
VectorTileParser.js | import { Vector2, Vector3 } from 'three';
import Protobuf from 'pbf';
import { VectorTile } from '@mapbox/vector-tile';
import { worldDimension3857 } from 'Core/Geographic/Extent';
import { FeatureCollection, FEATURE_TYPES } from 'Core/Feature';
import { featureFilter } from '@mapbox/mapbox-gl-style-spec';
import Style from 'Core/Style';
const globalExtent = new Vector3(worldDimension3857.x, worldDimension3857.y, 1);
const lastPoint = new Vector2();
const firstPoint = new Vector2();
const styleCache = new Map();
// The classify option allows classifying a full polygon together with its holes.
// Each polygon and its holes end up in one FeatureGeometry.
// A polygon is identified by its clockwise winding; its holes wind in the opposite direction.
// The winding direction is determined with the Shoelace formula https://en.wikipedia.org/wiki/Shoelace_formula
// Drawing polygons with canvas doesn't require this classification, but it is necessary for meshes.
function vtFeatureToFeatureGeometry(vtFeature, feature, classify = false) {
let geometry = feature.bindNewGeometry();
classify = classify && (feature.type === FEATURE_TYPES.POLYGON);
geometry.properties = vtFeature.properties;
const pbf = vtFeature._pbf;
pbf.pos = vtFeature._geometry;
const end = pbf.readVarint() + pbf.pos;
let cmd = 1;
let length = 0;
let x = 0;
let y = 0;
let count = 0;
let sum = 0;
while (pbf.pos < end) {
if (length <= 0) {
const cmdLen = pbf.readVarint();
cmd = cmdLen & 0x7;
length = cmdLen >> 3;
}
length--;
if (cmd === 1 || cmd === 2) {
x += pbf.readSVarint();
y += pbf.readSVarint();
if (cmd === 1) {
if (count) {
if (classify && sum > 0 && geometry.indices.length > 0) {
feature.updateExtent(geometry);
geometry = feature.bindNewGeometry();
geometry.properties = vtFeature.properties;
}
geometry.closeSubGeometry(count);
geometry.getLastSubGeometry().ccw = sum < 0;
}
count = 0;
sum = 0;
}
count++;
geometry.pushCoordinatesValues(x, y);
if (count == 1) {
firstPoint.set(x, y);
lastPoint.set(x, y);
} else if (classify && count > 1) {
sum += (lastPoint.x - x) * (lastPoint.y + y);
lastPoint.set(x, y);
}
} else if (cmd === 7) {
if (count) {
count++;
geometry.pushCoordinatesValues(firstPoint.x, firstPoint.y);
if (classify) {
sum += (lastPoint.x - firstPoint.x) * (lastPoint.y + firstPoint.y);
}
}
} else {
throw new Error(`unknown command ${cmd}`);
}
}
if (count) {
if (classify && sum > 0 && geometry.indices.length > 0) {
feature.updateExtent(geometry);
geometry = feature.bindNewGeometry();
geometry.properties = vtFeature.properties;
}
geometry.closeSubGeometry(count);
geometry.getLastSubGeometry().ccw = sum < 0;
}
feature.updateExtent(geometry);
}
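// Illustrative sketch (not part of the original file): the orientation test used
// above, isolated on a plain array of [x, y] pairs. The ring argument is
// hypothetical; the parser above flags a sub-geometry as counter-clockwise when
// this accumulated sum is negative (ccw = sum < 0).
//
//   function shoelaceSum(ring) {
//       let sum = 0;
//       for (let i = 1; i < ring.length; i += 1) {
//           sum += (ring[i - 1][0] - ring[i][0]) * (ring[i - 1][1] + ring[i][1]);
//       }
//       return sum; // the caller interprets the sign
//   }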
const defaultFilter = () => true;
function | (file, options) {
const vectorTile = new VectorTile(new Protobuf(file));
const extentSource = options.extentSource || file.coords;
const sourceLayers = Object.keys(vectorTile.layers);
if (sourceLayers.length < 1) {
return;
}
// x,y,z tile coordinates
const x = extentSource.col;
const z = extentSource.zoom;
// We need to move from TMS to Google/Bing/OSM coordinates
// https://alastaira.wordpress.com/2011/07/06/converting-tms-tile-coordinates-to-googlebingosm-tile-coordinates/
// Only if the layer.origin is top
const y = options.isInverted ? extentSource.row : (1 << z) - extentSource.row - 1;
options.buildExtent = true;
options.mergeFeatures = true;
options.withAltitude = false;
options.withNormal = false;
const features = new FeatureCollection('EPSG:3857', options);
// TODO remove defaultFilter;
features.filter = options.filter || defaultFilter;
const vFeature = vectorTile.layers[sourceLayers[0]];
const size = vFeature.extent * 2 ** z;
const center = -0.5 * size;
features.scale.set(size, -size, 1).divide(globalExtent);
features.translation.set(-(vFeature.extent * x + center), -(vFeature.extent * y + center), 0).divide(features.scale);
const allLayers = features.filter;
if (!features.filter.loaded) {
allLayers.forEach((l) => {
l.filterExpression = featureFilter(l.filter);
});
features.filter.loaded = true;
}
sourceLayers.forEach((layer_id) => {
const sourceLayer = vectorTile.layers[layer_id];
const layersStyle = allLayers.filter(l => sourceLayer.name == l['source-layer']);
for (let i = 0; i < sourceLayer.length; i++) {
const vtFeature = sourceLayer.feature(i);
const layerStyle = layersStyle.filter(l => l.filterExpression({ zoom: extentSource.zoom }, vtFeature))[0];
if (layerStyle) {
const properties = vtFeature.properties;
properties.style = styleCache.get(layerStyle.id);
if (!properties.style) {
properties.style = new Style();
properties.style.setFromVectorTileLayer(layerStyle);
styleCache.set(layerStyle.id, properties.style);
}
const feature = features.getFeatureByType(vtFeature.type - 1);
vtFeatureToFeatureGeometry(vtFeature, feature);
}
}
});
features.removeEmptyFeature();
features.updateExtent();
features.extent = extentSource;
return Promise.resolve(features);
}
/**
* @module VectorTileParser
*/
export default {
/**
* Parse a vector tile file and return a [Feature]{@link module:GeoJsonParser.Feature}
* or an array of Features. While multiple formats of vector tile are
* available, the only one supported for the moment is the
* [Mapbox Vector Tile]{@link https://www.mapbox.com/vector-tiles/specification/}.
*
* @param {ArrayBuffer} file - The vector tile file to parse.
* @param {Object} options - Options controlling the parsing.
* @param {Extent} options.extent - The Extent to convert the input coordinates to.
* @param {Extent} options.coords - Coordinates of the layer.
* @param {Extent=} options.filteringExtent - Optional filter to reject features
* outside of this extent.
* @param {boolean} [options.mergeFeatures=true] - If true all geometries are merged by type and multi-type
* @param {boolean} [options.withNormal=true] - If true each coordinate normal is computed
* @param {boolean} [options.withAltitude=true] - If true each coordinate altitude is kept
* @param {function=} options.filter - Filter function to remove features.
* @param {string=} options.isInverted - This option is to be set to the
* correct value, true or false (default being false), if the computation of
* the coordinates needs to be inverted to same scheme as OSM, Google Maps
* or other system. See [this link]{@link
* https://alastaira.wordpress.com/2011/07/06/converting-tms-tile-coordinates-to-googlebingosm-tile-coordinates}
* for more informations.
*
* @return {Promise} A Promise resolving with a Feature or an array a
* Features.
*/
parse(file, options) {
return Promise.resolve(readPBF(file, options));
},
};
| readPBF |
imagename_dataset.py | from runner_master import runner
import os
import io
import torch
import logging
from PIL import Image, ImageFile
from runner_master.runner.data import datasets
# to fix "OSError: image file is truncated"
ImageFile.LOAD_TRUNCATED_IMAGES = True
class ImagenameDataset(datasets.ImglistDatasetV2):
def getitem(self, index):
line = self.imglist[index].strip('\n')
tokens = line.split(' ', maxsplit=1)
#if len(tokens) != 2:
# raise RuntimeError('split tokens < 2')
image_name, extra_str = tokens[0], tokens[1]
if self.root != '' and image_name.startswith('/'):
|
path = os.path.join(self.root, image_name)
sample = dict()
sample['image_name'] = image_name
try:
if not self.dummy_read:
filebytes = self.reader(path)
buff = io.BytesIO(filebytes)
if self.dummy_size is not None:
sample['data'] = torch.rand(self.dummy_size)
else:
image = Image.open(buff)
sample['data'] = self.transform_image(image)
for key, value in self.transform_extra(extra_str).items():
sample[key] = value
except Exception as e:
logging.error('[{}] broken'.format(path))
raise e
return sample
runner.patch_dataset('ImagenameDataset', ImagenameDataset)
| raise RuntimeError('root not empty but image_name starts with "/"') |
generalized_rcnn.py | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
"""
Implements the Generalized R-CNN framework
"""
import torch
from torch import nn
from openpose.structures.image_list import to_image_list
from ..backbone import build_backbone
from ..rpn.rpn import build_rpn
from ..roi_heads.roi_heads import build_roi_heads
class GeneralizedRCNN(nn.Module):
"""
Main class for Generalized R-CNN. Currently supports boxes and masks.
It consists of three main parts:
- backbone
- rpn
- heads: takes the features + the proposals from the RPN and computes
detections / masks from it.
"""
def __init__(self, cfg):
super(GeneralizedRCNN, self).__init__()
self.backbone = build_backbone(cfg)
self.rpn = build_rpn(cfg, self.backbone.out_channels)
self.roi_heads = build_roi_heads(cfg, self.backbone.out_channels)
def forward(self, images, targets=None):
| """
Arguments:
images (list[Tensor] or ImageList): images to be processed
targets (list[BoxList]): ground-truth boxes present in the image (optional)
Returns:
result (list[BoxList] or dict[Tensor]): the output from the model.
During training, it returns a dict[Tensor] which contains the losses.
During testing, it returns a list[BoxList] that contains additional fields
like `scores`, `labels` and `mask` (for Mask R-CNN models).
"""
if self.training and targets is None:
raise ValueError("In training mode, targets should be passed")
images = to_image_list(images)
features = self.backbone(images.tensors)
proposals, proposal_losses = self.rpn(images, features, targets)
if self.roi_heads:
x, result, detector_losses = self.roi_heads(features, proposals, targets)
else:
# RPN-only models don't have roi_heads
x = features
result = proposals
detector_losses = {}
if self.training:
losses = {}
losses.update(detector_losses)
losses.update(proposal_losses)
return losses,result
return result |
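# Illustrative sketch (not part of the original file) of the two modes described
# in the docstring; cfg, images and targets are hypothetical placeholders.
#
#     model = GeneralizedRCNN(cfg)
#     model.train()
#     losses, detections = model(images, targets)  # losses dict plus detections
#     model.eval()
#     detections = model(images)                   # list[BoxList] only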
|
test_abi.py | """Tests of getting ABI from different sources."""
import os
from collections import namedtuple
import pytest
from evmscript_parser.core.ABI import get_cached_combined
from evmscript_parser.core.ABI.storage import CachedStorage, ABIKey
from evmscript_parser.core.decode import decode_function_call
CUR_DIR = os.path.dirname(os.path.abspath(__file__))
INTERFACES = os.path.join(CUR_DIR, 'interfaces')
FunctionCall = namedtuple(
'FunctionCall',
field_names=['address', 'signature', 'name', 'call_data', 'was'],
)
positive_examples = (
# Tether
FunctionCall(
address='0xdac17f958d2ee523a2206206994597c13d831ec7',
signature='0x18160ddd',
name='totalSupply',
call_data='',
was=False
),
# Lido
FunctionCall(
address='0xae7ab96520de3a18e5e111b5eaab095312d7fe84',
signature='0x18160ddd',
name='totalSupply',
call_data='',
was=False
),
# Lido finance address
FunctionCall(
address='0x75c7b1D23f1cad7Fb4D60281d7069E46440BC179',
signature='0x33ea3dc8',
name='getTransaction',
call_data='1'.zfill(64),
was=False
),
# Lido, second call
FunctionCall(
address='0xae7ab96520de3a18e5e111b5eaab095312d7fe84',
signature='0x18160ddd',
name='totalSupply',
call_data='',
was=True
),
# Lido, wrong address, target signature not in ABI
FunctionCall(
address='0x75c7b1D23f1cad7Fb4D60281d7069E46440BC179',
signature='0x18160ddd',
name=None,
call_data='',
was=False
),
# Lido node operator registry
FunctionCall(
address='0x9D4AF1Ee19Dad8857db3a45B0374c81c8A1C6320',
signature='0x62dcfda1',
name='getRewardsDistribution',
call_data='1'.zfill(64),
was=False
)
)
@pytest.fixture(
scope='module', params=positive_examples,
ids=lambda x: f'{x.address}:{x.signature}'
)
def positive_example(request):
"""Get positive test case for call decoding."""
return request.param
@pytest.fixture(scope='module')
def | (
api_key: str, infura_prt_id: str, target_net: str
) -> CachedStorage:
"""Return prepared abi storage."""
return get_cached_combined(
api_key, target_net, INTERFACES
)
def test_combined_storage(abi_storage):
"""Run tests for prepared combined storage."""
interfaces = abi_storage._provider._interfaces
assert len(interfaces) > 0
assert '0x18160ddd' in interfaces
assert '0x35390714' in interfaces
assert '0x62dcfda1' in interfaces
def test_etherscan_api(abi_storage, positive_example: FunctionCall):
"""Run tests for getting ABI from Etherscan API."""
key = ABIKey(positive_example.address, positive_example.signature)
assert (key in abi_storage) is positive_example.was
decoded = decode_function_call(
positive_example.address, positive_example.signature,
positive_example.call_data, abi_storage,
)
if positive_example.name:
assert decoded.function_name == positive_example.name
else:
assert decoded is None
assert key in abi_storage
| abi_storage |
radar.go | package radar
import (
"image"
"image/color"
"image/draw"
"github.com/oakmound/oak/v3/render"
)
// Point is a utility type holding pointers to a location
type Point struct {
X, Y *float64
}
// Radar helps store and present information around interesting entities on a radar map
type Radar struct {
render.LayeredPoint
points map[Point]color.Color
center Point
width, height int
r *image.RGBA
outline *render.Sprite
ratio float64
}
var (
centerColor = color.RGBA{255, 255, 0, 255}
)
// NewRadar creates a radar that will display at 0,0 with the given dimensions.
// The given points will be displayed on the radar relative to the center point,
// with their absolute distance reduced by the given ratio.
func | (w, h int, points map[Point]color.Color, center Point, ratio float64) *Radar {
r := new(Radar)
r.LayeredPoint = render.NewLayeredPoint(0, 0, 0)
r.points = points
r.width = w
r.height = h
r.center = center
r.r = image.NewRGBA(image.Rect(0, 0, w, h))
r.outline = render.NewColorBox(w, h, color.RGBA{0, 0, 125, 125})
r.ratio = ratio
return r
}
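// Illustrative sketch (not part of the original file): constructing a radar that
// tracks one hypothetical entity. With ratio 10, an entity 100 world units from
// the center is drawn 10 pixels from the radar's center.
//
//	ex, ey := 300.0, 140.0
//	cx, cy := 250.0, 120.0
//	points := map[Point]color.Color{
//		{X: &ex, Y: &ey}: color.RGBA{255, 0, 0, 255},
//	}
//	r := NewRadar(64, 64, points, Point{X: &cx, Y: &cy}, 10)
//	r.SetPos(10, 10)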
// SetPos sets the position of the radar on the screen
func (r *Radar) SetPos(x, y float64) {
r.LayeredPoint.SetPos(x, y)
r.outline.SetPos(x, y)
}
// GetRGBA returns this radar's image
func (r *Radar) GetRGBA() *image.RGBA {
return r.r
}
// Draw draws the radar at a given offset
func (r *Radar) Draw(buff draw.Image, xOff, yOff float64) {
// Draw each point p in r.points
// at r.X() + center.X() - p.X(), r.Y() + center.Y() - p.Y()
// IF that value is < r.width/2, > -r.width/2, < r.height/2, > -r.height/2
for p, c := range r.points {
x := int((*p.X-*r.center.X)/r.ratio) + r.width/2
y := int((*p.Y-*r.center.Y)/r.ratio) + r.height/2
for x2 := x - 1; x2 < x+1; x2++ {
for y2 := y - 1; y2 < y+1; y2++ {
r.r.Set(x2, y2, c)
}
}
}
r.r.Set(r.width/2, r.height/2, centerColor)
render.DrawImage(buff, r.r, int(xOff+r.X()), int(yOff+r.Y()))
r.outline.Draw(buff, xOff, yOff)
r.r = image.NewRGBA(image.Rect(0, 0, r.width, r.height))
}
// AddPoint adds an additional point to the radar to be tracked
func (r *Radar) AddPoint(loc Point, c color.Color) {
r.points[loc] = c
}
| NewRadar |
parse_rpt.py | drc_filename = "flow/reports/sky130hd/tempsense/6_final_drc.rpt"
num_lines = sum(1 for line in open(drc_filename))
if num_lines > 3:
raise ValueError("DRC failed!")
else:
print("DRC is clean!")
# LVS Bypassed
# lvs_filename = "flow/reports/sky130hd/tempsense/6_final_lvs.rpt" |
# regex = r"failed"
# match = re.search(regex, lvs_line)
# if match != None:
# raise ValueError("LVS failed!")
# else:
# print("LVS is clean!") | # lvs_line = subprocess.check_output(['tail', '-1', lvs_filename]).decode(sys.stdout.encoding) |
InstructionJz.py | from scfmsp.controlflowanalysis.StatusRegister import StatusRegister
from scfmsp.controlflowanalysis.instructions.AbstractInstructionBranching import AbstractInstructionBranching
class InstructionJz(AbstractInstructionBranching):
| name = 'jz'
def get_execution_time(self):
return 2
def get_branching_condition_domain(self, ac):
return ac.sra.get(StatusRegister.ZERO) |
|
source.go | // Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package source constructs public URLs that link to the source files in a module. It
// can be used to build references to Go source code, or to any other files in a
// module.
//
// Of course, the module zip file contains all the files in the module. This
// package attempts to find the origin of the zip file, in a repository that is
// publicly readable, and constructs links to that repo. While a module zip file
// could in theory come from anywhere, including a non-public location, this
// package recognizes standard module path patterns and construct repository
// URLs from them, like the go command does.
package source
//
// Much of this code was adapted from
// https://go.googlesource.com/gddo/+/refs/heads/master/gosrc
// and
// https://go.googlesource.com/go/+/refs/heads/master/src/cmd/go/internal/get
import (
"context"
"encoding/json"
"fmt"
"net/http"
"path"
"regexp"
"strconv"
"strings"
"time"
"go.opencensus.io/plugin/ochttp"
"go.opencensus.io/trace"
"golang.org/x/net/context/ctxhttp"
"golang.org/x/pkgsite/internal/derrors"
"golang.org/x/pkgsite/internal/log"
"golang.org/x/pkgsite/internal/stdlib"
"golang.org/x/pkgsite/internal/version"
)
// Info holds source information about a module, used to generate URLs referring
// to directories, files and lines.
type Info struct {
repoURL string // URL of repo containing module; exported for DB schema compatibility
moduleDir string // directory of module relative to repo root
commit string // tag or ID of commit corresponding to version
templates urlTemplates // for building URLs
}
// RepoURL returns a URL for the home page of the repository.
func (i *Info) RepoURL() string {
if i == nil {
return ""
}
if i.templates.Repo == "" {
// The default repo template is just "{repo}".
return i.repoURL
}
return expand(i.templates.Repo, map[string]string{
"repo": i.repoURL,
})
}
// ModuleURL returns a URL for the home page of the module.
func (i *Info) ModuleURL() string {
return i.DirectoryURL("")
}
// DirectoryURL returns a URL for a directory relative to the module's home directory.
func (i *Info) DirectoryURL(dir string) string {
if i == nil {
return ""
}
return strings.TrimSuffix(expand(i.templates.Directory, map[string]string{
"repo": i.repoURL,
"importPath": path.Join(strings.TrimPrefix(i.repoURL, "https://"), dir),
"commit": i.commit,
"dir": path.Join(i.moduleDir, dir),
}), "/")
}
// FileURL returns a URL for a file whose pathname is relative to the module's home directory.
func (i *Info) FileURL(pathname string) string {
if i == nil {
return ""
}
dir, base := path.Split(pathname)
return expand(i.templates.File, map[string]string{
"repo": i.repoURL,
"importPath": path.Join(strings.TrimPrefix(i.repoURL, "https://"), dir),
"commit": i.commit,
"dir": dir,
"file": path.Join(i.moduleDir, pathname),
"base": base,
})
}
// LineURL returns a URL referring to a line in a file relative to the module's home directory.
func (i *Info) LineURL(pathname string, line int) string {
if i == nil {
return ""
}
dir, base := path.Split(pathname)
return expand(i.templates.Line, map[string]string{
"repo": i.repoURL,
"importPath": path.Join(strings.TrimPrefix(i.repoURL, "https://"), dir),
"commit": i.commit,
"file": path.Join(i.moduleDir, pathname),
"dir": dir,
"base": base,
"line": strconv.Itoa(line),
})
}
// RawURL returns a URL referring to the raw contents of a file relative to the
// module's home directory.
func (i *Info) RawURL(pathname string) string {
if i == nil {
return ""
}
// Some templates don't support raw content serving.
if i.templates.Raw == "" {
return ""
}
moduleDir := i.moduleDir
// Special case: the standard library's source module path is set to "src",
// which is correct for source file links. But the README is at the repo
// root, not in the src directory. In other words,
// Module.Units[0].Readme.FilePath is not relative to
// Module.Units[0].SourceInfo.moduleDir, as it is for every other module.
// Correct for that here.
if i.repoURL == stdlib.GoSourceRepoURL {
moduleDir = ""
}
return expand(i.templates.Raw, map[string]string{
"repo": i.repoURL,
"commit": i.commit,
"file": path.Join(moduleDir, pathname),
})
}
// map of common urlTemplates
var urlTemplatesByKind = map[string]urlTemplates{
"github": githubURLTemplates,
"gitlab": githubURLTemplates, // preserved for backwards compatibility (DB still has source_info->Kind = "gitlab")
"bitbucket": bitbucketURLTemplates,
}
// jsonInfo is a Go struct describing the JSON structure of an INFO.
type jsonInfo struct {
RepoURL string
ModuleDir string
Commit string
// Store common templates efficiently by setting this to a short string
// we look up in a map. If Kind != "", then Templates == nil.
Kind string `json:",omitempty"`
Templates *urlTemplates `json:",omitempty"`
}
// ToJSONForDB returns the Info encoded for storage in the database.
func (i *Info) MarshalJSON() (_ []byte, err error) {
defer derrors.Wrap(&err, "MarshalJSON")
ji := &jsonInfo{
RepoURL: i.repoURL,
ModuleDir: i.moduleDir,
Commit: i.commit,
}
// Store common templates efficiently, by name.
for kind, templs := range urlTemplatesByKind {
if i.templates == templs {
ji.Kind = kind
break
}
}
// We used to use different templates for GitHub and GitLab. Now that
// they're the same, prefer "github" for consistency (map random iteration
// order means we could get either here).
if ji.Kind == "gitlab" {
ji.Kind = "github"
}
if ji.Kind == "" && i.templates != (urlTemplates{}) {
ji.Templates = &i.templates
}
return json.Marshal(ji)
}
func (i *Info) UnmarshalJSON(data []byte) (err error) {
defer derrors.Wrap(&err, "UnmarshalJSON(data)")
var ji jsonInfo
if err := json.Unmarshal(data, &ji); err != nil {
return err
}
i.repoURL = trimVCSSuffix(ji.RepoURL)
i.moduleDir = ji.ModuleDir
i.commit = ji.Commit
if ji.Kind != "" {
i.templates = urlTemplatesByKind[ji.Kind]
} else if ji.Templates != nil {
i.templates = *ji.Templates
}
return nil
}
type Client struct {
// client used for HTTP requests. It is mutable for testing purposes.
// If nil, then moduleInfoDynamic will return nil, nil; also for testing.
httpClient *http.Client
}
// New constructs a *Client using the provided timeout.
func NewClient(timeout time.Duration) *Client {
return &Client{
httpClient: &http.Client{
Transport: &ochttp.Transport{},
Timeout: timeout,
},
}
}
// NewClientForTesting returns a Client suitable for testing. It returns the
// same results as an ordinary client for statically recognizable paths, but
// always returns a nil *Info for dynamic paths (those requiring HTTP requests).
func NewClientForTesting() *Client {
return &Client{}
}
// doURL makes an HTTP request using the given url and method. It returns an
// error if the request returns an error. If only200 is true, it also returns an
// error if any status code other than 200 is returned.
func (c *Client) doURL(ctx context.Context, method, url string, only200 bool) (_ *http.Response, err error) {
defer derrors.Wrap(&err, "doURL(ctx, client, %q, %q)", method, url)
if c == nil || c.httpClient == nil {
return nil, fmt.Errorf("c.httpClient cannot be nil")
}
req, err := http.NewRequest(method, url, nil)
if err != nil {
return nil, err
}
resp, err := ctxhttp.Do(ctx, c.httpClient, req)
if err != nil {
return nil, err
}
if only200 && resp.StatusCode != 200 {
resp.Body.Close()
return nil, fmt.Errorf("status %s", resp.Status)
}
return resp, nil
}
// ModuleInfo determines the repository corresponding to the module path. It
// returns a URL to that repo, as well as the directory of the module relative
// to the repo root.
//
// ModuleInfo may fetch from arbitrary URLs, so it can be slow.
func ModuleInfo(ctx context.Context, client *Client, modulePath, version string) (info *Info, err error) {
defer derrors.Wrap(&err, "source.ModuleInfo(ctx, %q, %q)", modulePath, version)
ctx, span := trace.StartSpan(ctx, "source.ModuleInfo")
defer span.End()
// The example.com domain can never be real; it is reserved for testing
// (https://en.wikipedia.org/wiki/Example.com). Treat it as if it used
// GitHub templates.
if strings.HasPrefix(modulePath, "example.com/") {
return NewGitHubInfo("https://"+modulePath, "", version), nil
}
if modulePath == stdlib.ModulePath {
return newStdlibInfo(version)
}
repo, relativeModulePath, templates, transformCommit, err := matchStatic(modulePath)
if err != nil {
info, err = moduleInfoDynamic(ctx, client, modulePath, version)
if err != nil {
return nil, err
}
} else {
commit, isHash := commitFromVersion(version, relativeModulePath)
if transformCommit != nil {
commit = transformCommit(commit, isHash)
}
info = &Info{
repoURL: trimVCSSuffix("https://" + repo),
moduleDir: relativeModulePath,
commit: commit,
templates: templates,
}
}
if info != nil {
adjustVersionedModuleDirectory(ctx, client, info)
}
return info, nil
// TODO(golang/go#39627): support launchpad.net, including the special case
// in cmd/go/internal/get/vcs.go.
}
func newStdlibInfo(version string) (_ *Info, err error) {
defer derrors.Wrap(&err, "newStdlibInfo(%q)", version)
commit, err := stdlib.TagForVersion(version)
if err != nil {
return nil, err
}
templates := googlesourceURLTemplates
templates.Raw = "https://github.com/golang/go/raw/{commit}/{file}"
return &Info{
repoURL: stdlib.GoSourceRepoURL,
moduleDir: stdlib.Directory(version),
commit: commit,
templates: templates,
}, nil
}
// matchStatic matches the given module or repo path against a list of known
// patterns. It returns the repo name, the module path relative to the repo
// root, and URL templates if there is a match.
//
// The relative module path may not be correct in all cases: it is wrong if it
// ends in a version that is not part of the repo directory structure, because
// the repo follows the "major branch" convention for versions 2 and above.
// E.g. this function could return "foo/v2", but the module files live under "foo"; the
// "/v2" is part of the module path (and the import paths of its packages) but
// is not a subdirectory. This mistake is corrected in adjustVersionedModuleDirectory,
// once we have all the information we need to fix it.
//
// repo + "/" + relativeModulePath is often, but not always, equal to
// moduleOrRepoPath. It is not when the argument is a module path that uses the
// go command's general syntax, which ends in a ".vcs" (e.g. ".git", ".hg") that
// is neither part of the repo nor the suffix. For example, if the argument is
// github.com/a/b/c
// then repo="github.com/a/b" and relativeModulePath="c"; together they make up the module path.
// But if the argument is
// example.com/a/b.git/c
// then repo="example.com/a/b" and relativeModulePath="c"; the ".git" is omitted, since it is neither
// part of the repo nor part of the relative path to the module within the repo.
func matchStatic(moduleOrRepoPath string) (repo, relativeModulePath string, _ urlTemplates, transformCommit transformCommitFunc, _ error) {
for _, pat := range patterns {
matches := pat.re.FindStringSubmatch(moduleOrRepoPath)
if matches == nil {
continue
}
var repo string
for i, n := range pat.re.SubexpNames() {
if n == "repo" {
repo = matches[i]
break
}
}
// Special case: git.apache.org has a go-import tag that points to
// github.com/apache, but it's not quite right (the repo prefix is
// missing a ".git"), so handle it here.
const apacheDomain = "git.apache.org/"
if strings.HasPrefix(repo, apacheDomain) {
repo = strings.Replace(repo, apacheDomain, "github.com/apache/", 1)
}
// Special case: module paths are blitiri.com.ar/go/..., but repos are blitiri.com.ar/git/r/...
if strings.HasPrefix(repo, "blitiri.com.ar/") {
repo = strings.Replace(repo, "/go/", "/git/r/", 1)
}
relativeModulePath = strings.TrimPrefix(moduleOrRepoPath, matches[0])
relativeModulePath = strings.TrimPrefix(relativeModulePath, "/")
return repo, relativeModulePath, pat.templates, pat.transformCommit, nil
}
return "", "", urlTemplates{}, nil, derrors.NotFound
}
// moduleInfoDynamic uses the go-import and go-source meta tags to construct an Info.
func moduleInfoDynamic(ctx context.Context, client *Client, modulePath, version string) (_ *Info, err error) {
defer derrors.Wrap(&err, "source.moduleInfoDynamic(ctx, client, %q, %q)", modulePath, version)
if client.httpClient == nil {
return nil, nil // for testing
}
sourceMeta, err := fetchMeta(ctx, client, modulePath)
if err != nil {
return nil, err
}
// Don't check that the tag information at the repo root prefix is the same
// as in the module path. It was done for us by the proxy and/or go command.
// (This lets us merge information from the go-import and go-source tags.)
// sourceMeta contains some information about where the module's source lives. But there
// are some problems:
// - We may only have a go-import tag, not a go-source tag, so we don't have URL templates for
// building URLs to files and directories.
// - Even if we do have a go-source tag, its URL template format predates
// versioning, so the URL templates won't provide a way to specify a
// version or commit.
//
// We resolve these problems as follows:
// 1. First look at the repo URL from the tag. If that matches a known hosting site, use the
// URL templates corresponding to that site and ignore whatever's in the tag.
// 2. Then look at the URL templates to see if they match a known pattern, and use the templates
// from that pattern. For example, the meta tags for gopkg.in/yaml.v2 only mention github
// in the URL templates, like "https://github.com/go-yaml/yaml/tree/v2.2.3{/dir}". We can observe
// that that template begins with a known pattern--a GitHub repo, ignore the rest of it, and use the
// GitHub URL templates that we know.
repoURL := sourceMeta.repoURL
_, _, templates, transformCommit, _ := matchStatic(removeHTTPScheme(repoURL))
// If err != nil, templates will be the zero value, so we can ignore it (same just below).
if templates == (urlTemplates{}) {
var repo string
repo, _, templates, transformCommit, _ = matchStatic(removeHTTPScheme(sourceMeta.dirTemplate))
if templates == (urlTemplates{}) {
if err == nil {
templates, transformCommit = matchLegacyTemplates(ctx, sourceMeta)
repoURL = strings.TrimSuffix(repoURL, ".git")
} else {
log.Infof(ctx, "no templates for repo URL %q from meta tag: err=%v", sourceMeta.repoURL, err)
}
} else {
// Use the repo from the template, not the original one.
repoURL = "https://" + repo
}
}
dir := strings.TrimPrefix(strings.TrimPrefix(modulePath, sourceMeta.repoRootPrefix), "/")
commit, isHash := commitFromVersion(version, dir)
if transformCommit != nil {
commit = transformCommit(commit, isHash)
}
return &Info{
repoURL: strings.TrimSuffix(repoURL, "/"),
moduleDir: dir,
commit: commit,
templates: templates,
}, nil
}
// List of template regexps and their corresponding likely templates,
// used by matchLegacyTemplates below.
var legacyTemplateMatches = []struct {
fileRegexp *regexp.Regexp
templates urlTemplates
transformCommit transformCommitFunc
}{
{
regexp.MustCompile(`/src/branch/\w+\{/dir\}/\{file\}#L\{line\}$`),
giteaURLTemplates, giteaTransformCommit,
},
{
regexp.MustCompile(`/src/\w+\{/dir\}/\{file\}#L\{line\}$`),
giteaURLTemplates, nil,
},
{
regexp.MustCompile(`/-/blob/\w+\{/dir\}/\{file\}#L\{line\}$`),
gitlab2URLTemplates, nil,
},
{
regexp.MustCompile(`/tree\{/dir\}/\{file\}#n\{line\}$`),
fdioURLTemplates, fdioTransformCommit,
},
}
// matchLegacyTemplates matches the templates from the go-source meta tag
// against some known patterns to guess the version-aware URL templates. If it
// can't find a match, it falls back using the go-source templates with some
// small replacements. These will not be version-aware but will still serve
// source at a fixed commit, which is better than nothing.
func matchLegacyTemplates(ctx context.Context, sm *sourceMeta) (_ urlTemplates, transformCommit transformCommitFunc) {
if sm.fileTemplate == "" {
return urlTemplates{}, nil
}
for _, ltm := range legacyTemplateMatches {
if ltm.fileRegexp.MatchString(sm.fileTemplate) {
return ltm.templates, ltm.transformCommit
}
}
log.Infof(ctx, "matchLegacyTemplates: no matches for repo URL %q; replacing", sm.repoURL)
rep := strings.NewReplacer(
"{/dir}/{file}", "/{file}",
"{dir}/{file}", "{file}",
"{/dir}", "/{dir}")
line := rep.Replace(sm.fileTemplate)
file := line
if i := strings.LastIndexByte(line, '#'); i > 0 {
file = line[:i]
}
return urlTemplates{
Repo: sm.repoURL,
Directory: rep.Replace(sm.dirTemplate),
File: file,
Line: line,
}, nil
}
// adjustVersionedModuleDirectory changes info.moduleDir if necessary to
// correctly reflect the repo structure. info.moduleDir will be wrong if it has
// a suffix "/vN" for N > 1, and the repo uses the "major branch" convention,
// where modules at version 2 and higher live on branches rather than
// subdirectories. See https://research.swtch.com/vgo-module for a discussion of
// the "major branch" vs. "major subdirectory" conventions for organizing a
// repo.
func adjustVersionedModuleDirectory(ctx context.Context, client *Client, info *Info) {
dirWithoutVersion := removeVersionSuffix(info.moduleDir)
if info.moduleDir == dirWithoutVersion {
return
}
// moduleDir does have a "/vN" for N > 1. To see if that is the actual directory,
// fetch the go.mod file from it.
res, err := client.doURL(ctx, "HEAD", info.FileURL("go.mod"), true)
// On any failure, assume that the right directory is the one without the version.
if err != nil {
info.moduleDir = dirWithoutVersion
} else {
res.Body.Close()
}
}
// removeHTTPScheme removes an initial "http://" or "https://" from url.
// The result can be used to match against our static patterns.
// If the URL uses a different scheme, it won't be removed and it won't
// match any patterns, as intended.
func removeHTTPScheme(url string) string {
for _, prefix := range []string{"https://", "http://"} {
if strings.HasPrefix(url, prefix) {
return url[len(prefix):]
}
}
return url
}
// removeVersionSuffix returns s with "/vN" removed if N is an integer > 1.
// Otherwise it returns s.
func removeVersionSuffix(s string) string {
dir, base := path.Split(s)
if !strings.HasPrefix(base, "v") {
return s
}
if n, err := strconv.Atoi(base[1:]); err != nil || n < 2 {
return s
}
return strings.TrimSuffix(dir, "/")
}
type transformCommitFunc func(commit string, isHash bool) string
// Patterns for determining repo and URL templates from module paths or repo
// URLs. Each regexp must match a prefix of the target string, and must have a
// group named "repo".
var patterns = []struct {
pattern string // uncompiled regexp
templates urlTemplates
re *regexp.Regexp
// transformCommit may alter the commit before substitution
transformCommit transformCommitFunc
}{
{
pattern: `^(?P<repo>github\.com/[a-z0-9A-Z_.\-]+/[a-z0-9A-Z_.\-]+)`,
templates: githubURLTemplates,
},
{
pattern: `^(?P<repo>bitbucket\.org/[a-z0-9A-Z_.\-]+/[a-z0-9A-Z_.\-]+)`,
templates: bitbucketURLTemplates,
},
{
pattern: `^(?P<repo>gitlab\.com/[a-z0-9A-Z_.\-]+/[a-z0-9A-Z_.\-]+)`,
templates: githubURLTemplates,
},
{
// Assume that any site beginning with "gitlab." works like gitlab.com.
pattern: `^(?P<repo>gitlab\.[a-z0-9A-Z.-]+/[a-z0-9A-Z_.\-]+/[a-z0-9A-Z_.\-]+)(\.git|$)`,
templates: githubURLTemplates,
},
{
pattern: `^(?P<repo>gitee\.com/[a-z0-9A-Z_.\-]+/[a-z0-9A-Z_.\-]+)(\.git|$)`,
templates: githubURLTemplates,
},
{
pattern: `^(?P<repo>git\.sr\.ht/~[a-z0-9A-Z_.\-]+/[a-z0-9A-Z_.\-]+)`,
templates: urlTemplates{
Directory: "{repo}/tree/{commit}/{dir}",
File: "{repo}/tree/{commit}/{file}",
Line: "{repo}/tree/{commit}/{file}#L{line}",
Raw: "{repo}/blob/{commit}/{file}",
},
},
{
pattern: `^(?P<repo>git\.fd\.io/[a-z0-9A-Z_.\-]+)`,
templates: fdioURLTemplates,
transformCommit: fdioTransformCommit,
},
{
pattern: `^(?P<repo>git\.pirl\.io/[a-z0-9A-Z_.\-]+/[a-z0-9A-Z_.\-]+)`,
templates: gitlab2URLTemplates,
},
{
pattern: `^(?P<repo>gitea\.com/[a-z0-9A-Z_.\-]+/[a-z0-9A-Z_.\-]+)(\.git|$)`,
templates: giteaURLTemplates,
transformCommit: giteaTransformCommit,
},
{
// Assume that any site beginning with "gitea." works like gitea.com.
pattern: `^(?P<repo>gitea\.[a-z0-9A-Z.-]+/[a-z0-9A-Z_.\-]+/[a-z0-9A-Z_.\-]+)(\.git|$)`,
templates: giteaURLTemplates,
transformCommit: giteaTransformCommit,
},
{
pattern: `^(?P<repo>go\.isomorphicgo\.org/[a-z0-9A-Z_.\-]+/[a-z0-9A-Z_.\-]+)(\.git|$)`,
templates: giteaURLTemplates,
transformCommit: giteaTransformCommit,
},
{
pattern: `^(?P<repo>git\.openprivacy\.ca/[a-z0-9A-Z_.\-]+/[a-z0-9A-Z_.\-]+)(\.git|$)`,
templates: giteaURLTemplates,
transformCommit: giteaTransformCommit,
},
{
pattern: `^(?P<repo>gogs\.[a-z0-9A-Z.-]+/[a-z0-9A-Z_.\-]+/[a-z0-9A-Z_.\-]+)(\.git|$)`,
// Gogs uses the same basic structure as Gitea, but omits the type of
// commit ("tag" or "commit"), so we don't need a transformCommit
// function. Gogs does not support short hashes, but we create those
// URLs anyway. See gogs/gogs#6242.
templates: giteaURLTemplates,
},
{
pattern: `^(?P<repo>dmitri\.shuralyov\.com\/.+)$`,
templates: urlTemplates{
Repo: "{repo}/...",
Directory: "https://gotools.org/{importPath}?rev={commit}",
File: "https://gotools.org/{importPath}?rev={commit}#{base}",
Line: "https://gotools.org/{importPath}?rev={commit}#{base}-L{line}",
},
},
{
pattern: `^(?P<repo>blitiri\.com\.ar/go/.+)$`,
templates: urlTemplates{
Repo: "{repo}",
Directory: "{repo}/b/master/t/{dir}",
File: "{repo}/b/master/t/{dir}f={file}.html",
Line: "{repo}/b/master/t/{dir}f={file}.html#line-{line}",
},
},
// Patterns that match the general go command pattern, where they must have
// a ".git" repo suffix in an import path. If matching a repo URL from a meta tag,
// there is no ".git".
{
pattern: `^(?P<repo>[^.]+\.googlesource\.com/[^.]+)(\.git|$)`,
templates: googlesourceURLTemplates,
},
{
pattern: `^(?P<repo>git\.apache\.org/[^.]+)(\.git|$)`,
templates: githubURLTemplates,
},
// General syntax for the go command. We can extract the repo and directory, but
// we don't know the URL templates.
// Must be last in this list.
{
pattern: `(?P<repo>([a-z0-9.\-]+\.)+[a-z0-9.\-]+(:[0-9]+)?(/~?[A-Za-z0-9_.\-]+)+?)\.(bzr|fossil|git|hg|svn)`,
templates: urlTemplates{},
},
}
func init() {
for i := range patterns {
re := regexp.MustCompile(patterns[i].pattern)
// The pattern regexp must contain a group named "repo".
found := false
for _, n := range re.SubexpNames() {
if n == "repo" {
found = true
break
}
}
if !found {
panic(fmt.Sprintf("pattern %s missing <repo> group", patterns[i].pattern))
}
patterns[i].re = re
}
}
// giteaTransformCommit transforms commits for the Gitea code hosting system.
func | (commit string, isHash bool) string {
// Hashes use "commit", tags use "tag".
// Short hashes are supported as of v1.14.0.
if isHash {
return "commit/" + commit
}
return "tag/" + commit
}
func fdioTransformCommit(commit string, isHash bool) string {
// hashes use "?id=", tags use "?h="
p := "h"
if isHash {
p = "id"
}
return fmt.Sprintf("%s=%s", p, commit)
}
// urlTemplates describes how to build URLs from bits of source information.
// The fields are exported for JSON encoding.
//
// The template variables are:
//
// • {repo} - Repository URL with "https://" prefix ("https://example.com/myrepo").
// • {importPath} - Package import path ("example.com/myrepo/mypkg").
// • {commit} - Tag name or commit hash corresponding to version ("v0.1.0" or "1234567890ab").
// • {dir} - Path to directory of the package, relative to repo root ("mypkg").
// • {file} - Path to file containing the identifier, relative to repo root ("mypkg/file.go").
// • {base} - Base name of file containing the identifier, including file extension ("file.go").
// • {line} - Line number for the identifier ("41").
//
type urlTemplates struct {
Repo string `json:",omitempty"` // Optional URL template for the repository home page, with {repo}. If left empty, a default template "{repo}" is used.
Directory string // URL template for a directory, with {repo}, {importPath}, {commit}, {dir}.
File string // URL template for a file, with {repo}, {importPath}, {commit}, {file}, {base}.
Line string // URL template for a line, with {repo}, {importPath}, {commit}, {file}, {base}, {line}.
Raw string // Optional URL template for the raw contents of a file, with {repo}, {commit}, {file}.
}
var (
githubURLTemplates = urlTemplates{
Directory: "{repo}/tree/{commit}/{dir}",
File: "{repo}/blob/{commit}/{file}",
Line: "{repo}/blob/{commit}/{file}#L{line}",
Raw: "{repo}/raw/{commit}/{file}",
}
bitbucketURLTemplates = urlTemplates{
Directory: "{repo}/src/{commit}/{dir}",
File: "{repo}/src/{commit}/{file}",
Line: "{repo}/src/{commit}/{file}#lines-{line}",
Raw: "{repo}/raw/{commit}/{file}",
}
giteaURLTemplates = urlTemplates{
Directory: "{repo}/src/{commit}/{dir}",
File: "{repo}/src/{commit}/{file}",
Line: "{repo}/src/{commit}/{file}#L{line}",
Raw: "{repo}/raw/{commit}/{file}",
}
googlesourceURLTemplates = urlTemplates{
Directory: "{repo}/+/{commit}/{dir}",
File: "{repo}/+/{commit}/{file}",
Line: "{repo}/+/{commit}/{file}#{line}",
// Gitiles has no support for serving raw content at this time.
}
gitlab2URLTemplates = urlTemplates{
Directory: "{repo}/-/tree/{commit}/{dir}",
File: "{repo}/-/blob/{commit}/{file}",
Line: "{repo}/-/blob/{commit}/{file}#L{line}",
Raw: "{repo}/-/raw/{commit}/{file}",
}
fdioURLTemplates = urlTemplates{
Directory: "{repo}/tree/{dir}?{commit}",
File: "{repo}/tree/{file}?{commit}",
Line: "{repo}/tree/{file}?{commit}#n{line}",
Raw: "{repo}/plain/{file}?{commit}",
}
)
// commitFromVersion returns a string that refers to a commit corresponding to version.
// It also reports whether the returned value is a commit hash.
// The string may be a tag, or it may be the hash or similar unique identifier of a commit.
// The second argument is the module path relative to the repo root.
func commitFromVersion(vers, relativeModulePath string) (commit string, isHash bool) {
// Commit for the module: either a sha for pseudoversions, or a tag.
v := strings.TrimSuffix(vers, "+incompatible")
if version.IsPseudo(v) {
// Use the commit hash at the end.
return v[strings.LastIndex(v, "-")+1:], true
} else {
// The tags for a nested module begin with the relative module path of the module,
// removing a "/vN" suffix if N > 1.
prefix := removeVersionSuffix(relativeModulePath)
if prefix != "" {
return prefix + "/" + v, false
}
return v, false
}
}
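// Illustrative examples (not part of the original file) of commitFromVersion;
// the inputs are hypothetical:
//
//	commitFromVersion("v0.0.0-20190101000000-abcdef123456", "") // "abcdef123456", true
//	commitFromVersion("v2.3.4", "sub/v2")                       // "sub/v2.3.4", false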
// trimVCSSuffix removes a VCS suffix from a repo URL in selected cases.
//
// The Go command allows a VCS suffix on a repo, like github.com/foo/bar.git. But
// some code hosting sites don't support all paths constructed from such URLs.
// For example, GitHub will redirect github.com/foo/bar.git to github.com/foo/bar,
// but will 404 on github.com/foo/bar.git/tree/master and any other URL with a
// non-empty path.
//
// To be conservative, we remove the suffix only in cases where we know it's
// wrong.
func trimVCSSuffix(repoURL string) string {
if !strings.HasSuffix(repoURL, ".git") {
return repoURL
}
if strings.HasPrefix(repoURL, "https://github.com/") || strings.HasPrefix(repoURL, "https://gitlab.com/") {
return strings.TrimSuffix(repoURL, ".git")
}
return repoURL
}
// The following code copied from cmd/go/internal/get:
// expand rewrites s to replace {k} with match[k] for each key k in match.
func expand(s string, match map[string]string) string {
// We want to replace each match exactly once, and the result of expansion
// must not depend on the iteration order through the map.
// A strings.Replacer has exactly the properties we're looking for.
oldNew := make([]string, 0, 2*len(match))
for k, v := range match {
oldNew = append(oldNew, "{"+k+"}", v)
}
return strings.NewReplacer(oldNew...).Replace(s)
}
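// Illustrative sketch (not part of the original file): how expand fills one of the
// templates above; the repo, file and line values are hypothetical.
//
//	url := expand(githubURLTemplates.Line, map[string]string{
//		"repo":   "https://github.com/foo/bar",
//		"commit": "v1.2.3",
//		"file":   "baz/qux.go",
//		"line":   "41",
//	})
//	// url == "https://github.com/foo/bar/blob/v1.2.3/baz/qux.go#L41"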
// NewGitHubInfo creates a source.Info with GitHub URL templates.
// It is for testing only.
func NewGitHubInfo(repoURL, moduleDir, commit string) *Info {
return &Info{
repoURL: trimVCSSuffix(repoURL),
moduleDir: moduleDir,
commit: commit,
templates: githubURLTemplates,
}
}
// NewStdlibInfo returns a source.Info for the standard library at the given
// semantic version. It panics if the version does not correspond to a Go release
// tag. It is for testing only.
func NewStdlibInfo(version string) *Info {
info, err := newStdlibInfo(version)
if err != nil {
panic(err)
}
return info
}
| giteaTransformCommit |
ServiceInterface.go | // Code generated by mockery v1.0.0. DO NOT EDIT.
// Regenerate this file using `make email-mocks`.
package mocks
import (
io "io"
i18n "github.com/mattermost/mattermost-server/v6/shared/i18n"
mock "github.com/stretchr/testify/mock"
model "github.com/mattermost/mattermost-server/v6/model"
templates "github.com/mattermost/mattermost-server/v6/shared/templates"
throttled "github.com/throttled/throttled"
)
// ServiceInterface is an autogenerated mock type for the ServiceInterface type
type ServiceInterface struct {
mock.Mock
}
// AddNotificationEmailToBatch provides a mock function with given fields: user, post, team
func (_m *ServiceInterface) AddNotificationEmailToBatch(user *model.User, post *model.Post, team *model.Team) *model.AppError {
ret := _m.Called(user, post, team)
var r0 *model.AppError
if rf, ok := ret.Get(0).(func(*model.User, *model.Post, *model.Team) *model.AppError); ok {
r0 = rf(user, post, team)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).(*model.AppError)
}
}
return r0
}
// CreateVerifyEmailToken provides a mock function with given fields: userID, newEmail
func (_m *ServiceInterface) CreateVerifyEmailToken(userID string, newEmail string) (*model.Token, error) {
ret := _m.Called(userID, newEmail)
var r0 *model.Token
if rf, ok := ret.Get(0).(func(string, string) *model.Token); ok {
r0 = rf(userID, newEmail)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).(*model.Token)
}
}
var r1 error
if rf, ok := ret.Get(1).(func(string, string) error); ok {
r1 = rf(userID, newEmail)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// GetMessageForNotification provides a mock function with given fields: post, translateFunc
func (_m *ServiceInterface) GetMessageForNotification(post *model.Post, translateFunc i18n.TranslateFunc) string {
ret := _m.Called(post, translateFunc)
var r0 string
if rf, ok := ret.Get(0).(func(*model.Post, i18n.TranslateFunc) string); ok {
r0 = rf(post, translateFunc)
} else {
r0 = ret.Get(0).(string)
}
return r0
}
// GetPerDayEmailRateLimiter provides a mock function with given fields:
func (_m *ServiceInterface) GetPerDayEmailRateLimiter() *throttled.GCRARateLimiter {
ret := _m.Called()
var r0 *throttled.GCRARateLimiter
if rf, ok := ret.Get(0).(func() *throttled.GCRARateLimiter); ok {
r0 = rf()
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).(*throttled.GCRARateLimiter)
}
}
return r0
}
// InitEmailBatching provides a mock function with given fields:
func (_m *ServiceInterface) InitEmailBatching() {
_m.Called()
}
// NewEmailTemplateData provides a mock function with given fields: locale
func (_m *ServiceInterface) NewEmailTemplateData(locale string) templates.Data {
ret := _m.Called(locale)
var r0 templates.Data
if rf, ok := ret.Get(0).(func(string) templates.Data); ok {
r0 = rf(locale)
} else {
r0 = ret.Get(0).(templates.Data)
}
return r0
}
// SendAtUserLimitWarningEmail provides a mock function with given fields: _a0, locale, siteURL
func (_m *ServiceInterface) SendAtUserLimitWarningEmail(_a0 string, locale string, siteURL string) (bool, error) {
ret := _m.Called(_a0, locale, siteURL)
var r0 bool
if rf, ok := ret.Get(0).(func(string, string, string) bool); ok {
r0 = rf(_a0, locale, siteURL)
} else {
r0 = ret.Get(0).(bool)
}
var r1 error
if rf, ok := ret.Get(1).(func(string, string, string) error); ok {
r1 = rf(_a0, locale, siteURL)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// SendChangeUsernameEmail provides a mock function with given fields: newUsername, _a1, locale, siteURL
func (_m *ServiceInterface) SendChangeUsernameEmail(newUsername string, _a1 string, locale string, siteURL string) error {
ret := _m.Called(newUsername, _a1, locale, siteURL)
var r0 error
if rf, ok := ret.Get(0).(func(string, string, string, string) error); ok {
r0 = rf(newUsername, _a1, locale, siteURL)
} else {
r0 = ret.Error(0)
}
return r0
}
// SendCloudTrialEndWarningEmail provides a mock function with given fields: userEmail, name, trialEndDate, locale, siteURL
func (_m *ServiceInterface) SendCloudTrialEndWarningEmail(userEmail string, name string, trialEndDate string, locale string, siteURL string) error {
ret := _m.Called(userEmail, name, trialEndDate, locale, siteURL)
var r0 error
if rf, ok := ret.Get(0).(func(string, string, string, string, string) error); ok {
r0 = rf(userEmail, name, trialEndDate, locale, siteURL)
} else {
r0 = ret.Error(0)
}
return r0
}
// SendCloudTrialEndedEmail provides a mock function with given fields: userEmail, name, locale, siteURL
func (_m *ServiceInterface) SendCloudTrialEndedEmail(userEmail string, name string, locale string, siteURL string) error {
ret := _m.Called(userEmail, name, locale, siteURL)
var r0 error
if rf, ok := ret.Get(0).(func(string, string, string, string) error); ok {
r0 = rf(userEmail, name, locale, siteURL)
} else {
r0 = ret.Error(0)
}
return r0
}
// SendCloudWelcomeEmail provides a mock function with given fields: userEmail, locale, teamInviteID, workSpaceName, dns, siteURL
func (_m *ServiceInterface) SendCloudWelcomeEmail(userEmail string, locale string, teamInviteID string, workSpaceName string, dns string, siteURL string) error {
ret := _m.Called(userEmail, locale, teamInviteID, workSpaceName, dns, siteURL)
var r0 error
if rf, ok := ret.Get(0).(func(string, string, string, string, string, string) error); ok {
r0 = rf(userEmail, locale, teamInviteID, workSpaceName, dns, siteURL)
} else {
r0 = ret.Error(0)
}
return r0
}
// SendDeactivateAccountEmail provides a mock function with given fields: _a0, locale, siteURL
func (_m *ServiceInterface) SendDeactivateAccountEmail(_a0 string, locale string, siteURL string) error {
ret := _m.Called(_a0, locale, siteURL)
var r0 error
if rf, ok := ret.Get(0).(func(string, string, string) error); ok {
r0 = rf(_a0, locale, siteURL)
} else {
r0 = ret.Error(0)
}
return r0
}
// SendEmailChangeEmail provides a mock function with given fields: oldEmail, newEmail, locale, siteURL
func (_m *ServiceInterface) SendEmailChangeEmail(oldEmail string, newEmail string, locale string, siteURL string) error {
ret := _m.Called(oldEmail, newEmail, locale, siteURL)
var r0 error
if rf, ok := ret.Get(0).(func(string, string, string, string) error); ok {
r0 = rf(oldEmail, newEmail, locale, siteURL)
} else {
r0 = ret.Error(0)
}
return r0
}
// SendEmailChangeVerifyEmail provides a mock function with given fields: newUserEmail, locale, siteURL, token
func (_m *ServiceInterface) SendEmailChangeVerifyEmail(newUserEmail string, locale string, siteURL string, token string) error {
ret := _m.Called(newUserEmail, locale, siteURL, token)
var r0 error
if rf, ok := ret.Get(0).(func(string, string, string, string) error); ok {
r0 = rf(newUserEmail, locale, siteURL, token)
} else {
r0 = ret.Error(0)
}
return r0
}
// SendGuestInviteEmails provides a mock function with given fields: team, channels, senderName, senderUserId, senderProfileImage, invites, siteURL, message, errorWhenNotSent
func (_m *ServiceInterface) SendGuestInviteEmails(team *model.Team, channels []*model.Channel, senderName string, senderUserId string, senderProfileImage []byte, invites []string, siteURL string, message string, errorWhenNotSent bool) error {
ret := _m.Called(team, channels, senderName, senderUserId, senderProfileImage, invites, siteURL, message, errorWhenNotSent)
var r0 error
if rf, ok := ret.Get(0).(func(*model.Team, []*model.Channel, string, string, []byte, []string, string, string, bool) error); ok {
r0 = rf(team, channels, senderName, senderUserId, senderProfileImage, invites, siteURL, message, errorWhenNotSent)
} else {
r0 = ret.Error(0)
}
return r0
}
// SendInviteEmails provides a mock function with given fields: team, senderName, senderUserId, invites, siteURL, reminderData, errorWhenNotSent
func (_m *ServiceInterface) SendInviteEmails(team *model.Team, senderName string, senderUserId string, invites []string, siteURL string, reminderData *model.TeamInviteReminderData, errorWhenNotSent bool) error {
ret := _m.Called(team, senderName, senderUserId, invites, siteURL, reminderData, errorWhenNotSent)
var r0 error
if rf, ok := ret.Get(0).(func(*model.Team, string, string, []string, string, *model.TeamInviteReminderData, bool) error); ok {
r0 = rf(team, senderName, senderUserId, invites, siteURL, reminderData, errorWhenNotSent)
} else {
r0 = ret.Error(0)
}
return r0
}
// SendLicenseInactivityEmail provides a mock function with given fields: _a0, name, locale, siteURL
func (_m *ServiceInterface) SendLicenseInactivityEmail(_a0 string, name string, locale string, siteURL string) error {
ret := _m.Called(_a0, name, locale, siteURL)
var r0 error
if rf, ok := ret.Get(0).(func(string, string, string, string) error); ok {
r0 = rf(_a0, name, locale, siteURL)
} else {
r0 = ret.Error(0)
}
return r0
}
// SendLicenseUpForRenewalEmail provides a mock function with given fields: _a0, name, locale, siteURL, renewalLink, daysToExpiration
func (_m *ServiceInterface) SendLicenseUpForRenewalEmail(_a0 string, name string, locale string, siteURL string, renewalLink string, daysToExpiration int) error {
ret := _m.Called(_a0, name, locale, siteURL, renewalLink, daysToExpiration)
var r0 error
if rf, ok := ret.Get(0).(func(string, string, string, string, string, int) error); ok {
r0 = rf(_a0, name, locale, siteURL, renewalLink, daysToExpiration)
} else {
r0 = ret.Error(0)
}
return r0
}
// SendMailWithEmbeddedFiles provides a mock function with given fields: to, subject, htmlBody, embeddedFiles
func (_m *ServiceInterface) SendMailWithEmbeddedFiles(to string, subject string, htmlBody string, embeddedFiles map[string]io.Reader) error {
ret := _m.Called(to, subject, htmlBody, embeddedFiles)
var r0 error
if rf, ok := ret.Get(0).(func(string, string, string, map[string]io.Reader) error); ok {
r0 = rf(to, subject, htmlBody, embeddedFiles)
} else {
r0 = ret.Error(0)
}
return r0
}
// SendMfaChangeEmail provides a mock function with given fields: _a0, activated, locale, siteURL
func (_m *ServiceInterface) SendMfaChangeEmail(_a0 string, activated bool, locale string, siteURL string) error {
ret := _m.Called(_a0, activated, locale, siteURL)
var r0 error
if rf, ok := ret.Get(0).(func(string, bool, string, string) error); ok {
r0 = rf(_a0, activated, locale, siteURL)
} else {
r0 = ret.Error(0)
}
return r0
}
// SendNoCardPaymentFailedEmail provides a mock function with given fields: _a0, locale, siteURL
func (_m *ServiceInterface) SendNoCardPaymentFailedEmail(_a0 string, locale string, siteURL string) error {
ret := _m.Called(_a0, locale, siteURL)
var r0 error
if rf, ok := ret.Get(0).(func(string, string, string) error); ok {
r0 = rf(_a0, locale, siteURL)
} else {
r0 = ret.Error(0)
}
return r0
}
// SendNotificationMail provides a mock function with given fields: to, subject, htmlBody
func (_m *ServiceInterface) SendNotificationMail(to string, subject string, htmlBody string) error {
ret := _m.Called(to, subject, htmlBody)
var r0 error
if rf, ok := ret.Get(0).(func(string, string, string) error); ok { |
return r0
}
// SendOverUserFourteenDayWarningEmail provides a mock function with given fields: _a0, locale, siteURL, overLimitDate
func (_m *ServiceInterface) SendOverUserFourteenDayWarningEmail(_a0 string, locale string, siteURL string, overLimitDate string) (bool, error) {
ret := _m.Called(_a0, locale, siteURL, overLimitDate)
var r0 bool
if rf, ok := ret.Get(0).(func(string, string, string, string) bool); ok {
r0 = rf(_a0, locale, siteURL, overLimitDate)
} else {
r0 = ret.Get(0).(bool)
}
var r1 error
if rf, ok := ret.Get(1).(func(string, string, string, string) error); ok {
r1 = rf(_a0, locale, siteURL, overLimitDate)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// SendOverUserLimitNinetyDayWarningEmail provides a mock function with given fields: _a0, locale, siteURL, overLimitDate
func (_m *ServiceInterface) SendOverUserLimitNinetyDayWarningEmail(_a0 string, locale string, siteURL string, overLimitDate string) (bool, error) {
ret := _m.Called(_a0, locale, siteURL, overLimitDate)
var r0 bool
if rf, ok := ret.Get(0).(func(string, string, string, string) bool); ok {
r0 = rf(_a0, locale, siteURL, overLimitDate)
} else {
r0 = ret.Get(0).(bool)
}
var r1 error
if rf, ok := ret.Get(1).(func(string, string, string, string) error); ok {
r1 = rf(_a0, locale, siteURL, overLimitDate)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// SendOverUserLimitThirtyDayWarningEmail provides a mock function with given fields: _a0, locale, siteURL
func (_m *ServiceInterface) SendOverUserLimitThirtyDayWarningEmail(_a0 string, locale string, siteURL string) (bool, error) {
ret := _m.Called(_a0, locale, siteURL)
var r0 bool
if rf, ok := ret.Get(0).(func(string, string, string) bool); ok {
r0 = rf(_a0, locale, siteURL)
} else {
r0 = ret.Get(0).(bool)
}
var r1 error
if rf, ok := ret.Get(1).(func(string, string, string) error); ok {
r1 = rf(_a0, locale, siteURL)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// SendOverUserLimitWarningEmail provides a mock function with given fields: _a0, locale, siteURL
func (_m *ServiceInterface) SendOverUserLimitWarningEmail(_a0 string, locale string, siteURL string) (bool, error) {
ret := _m.Called(_a0, locale, siteURL)
var r0 bool
if rf, ok := ret.Get(0).(func(string, string, string) bool); ok {
r0 = rf(_a0, locale, siteURL)
} else {
r0 = ret.Get(0).(bool)
}
var r1 error
if rf, ok := ret.Get(1).(func(string, string, string) error); ok {
r1 = rf(_a0, locale, siteURL)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// SendOverUserLimitWorkspaceSuspendedWarningEmail provides a mock function with given fields: _a0, locale, siteURL
func (_m *ServiceInterface) SendOverUserLimitWorkspaceSuspendedWarningEmail(_a0 string, locale string, siteURL string) (bool, error) {
ret := _m.Called(_a0, locale, siteURL)
var r0 bool
if rf, ok := ret.Get(0).(func(string, string, string) bool); ok {
r0 = rf(_a0, locale, siteURL)
} else {
r0 = ret.Get(0).(bool)
}
var r1 error
if rf, ok := ret.Get(1).(func(string, string, string) error); ok {
r1 = rf(_a0, locale, siteURL)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// SendOverUserSevenDayWarningEmail provides a mock function with given fields: _a0, locale, siteURL
func (_m *ServiceInterface) SendOverUserSevenDayWarningEmail(_a0 string, locale string, siteURL string) (bool, error) {
ret := _m.Called(_a0, locale, siteURL)
var r0 bool
if rf, ok := ret.Get(0).(func(string, string, string) bool); ok {
r0 = rf(_a0, locale, siteURL)
} else {
r0 = ret.Get(0).(bool)
}
var r1 error
if rf, ok := ret.Get(1).(func(string, string, string) error); ok {
r1 = rf(_a0, locale, siteURL)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// SendPasswordChangeEmail provides a mock function with given fields: _a0, method, locale, siteURL
func (_m *ServiceInterface) SendPasswordChangeEmail(_a0 string, method string, locale string, siteURL string) error {
ret := _m.Called(_a0, method, locale, siteURL)
var r0 error
if rf, ok := ret.Get(0).(func(string, string, string, string) error); ok {
r0 = rf(_a0, method, locale, siteURL)
} else {
r0 = ret.Error(0)
}
return r0
}
// SendPasswordResetEmail provides a mock function with given fields: _a0, token, locale, siteURL
func (_m *ServiceInterface) SendPasswordResetEmail(_a0 string, token *model.Token, locale string, siteURL string) (bool, error) {
ret := _m.Called(_a0, token, locale, siteURL)
var r0 bool
if rf, ok := ret.Get(0).(func(string, *model.Token, string, string) bool); ok {
r0 = rf(_a0, token, locale, siteURL)
} else {
r0 = ret.Get(0).(bool)
}
var r1 error
if rf, ok := ret.Get(1).(func(string, *model.Token, string, string) error); ok {
r1 = rf(_a0, token, locale, siteURL)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// SendPaymentFailedEmail provides a mock function with given fields: _a0, locale, failedPayment, siteURL
func (_m *ServiceInterface) SendPaymentFailedEmail(_a0 string, locale string, failedPayment *model.FailedPayment, siteURL string) (bool, error) {
ret := _m.Called(_a0, locale, failedPayment, siteURL)
var r0 bool
if rf, ok := ret.Get(0).(func(string, string, *model.FailedPayment, string) bool); ok {
r0 = rf(_a0, locale, failedPayment, siteURL)
} else {
r0 = ret.Get(0).(bool)
}
var r1 error
if rf, ok := ret.Get(1).(func(string, string, *model.FailedPayment, string) error); ok {
r1 = rf(_a0, locale, failedPayment, siteURL)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// SendRemoveExpiredLicenseEmail provides a mock function with given fields: renewalLink, _a1, locale, siteURL
func (_m *ServiceInterface) SendRemoveExpiredLicenseEmail(renewalLink string, _a1 string, locale string, siteURL string) error {
ret := _m.Called(renewalLink, _a1, locale, siteURL)
var r0 error
if rf, ok := ret.Get(0).(func(string, string, string, string) error); ok {
r0 = rf(renewalLink, _a1, locale, siteURL)
} else {
r0 = ret.Error(0)
}
return r0
}
// SendSignInChangeEmail provides a mock function with given fields: _a0, method, locale, siteURL
func (_m *ServiceInterface) SendSignInChangeEmail(_a0 string, method string, locale string, siteURL string) error {
ret := _m.Called(_a0, method, locale, siteURL)
var r0 error
if rf, ok := ret.Get(0).(func(string, string, string, string) error); ok {
r0 = rf(_a0, method, locale, siteURL)
} else {
r0 = ret.Error(0)
}
return r0
}
// SendSuspensionEmailToSupport provides a mock function with given fields: _a0, installationID, customerID, subscriptionID, siteURL, userCount
func (_m *ServiceInterface) SendSuspensionEmailToSupport(_a0 string, installationID string, customerID string, subscriptionID string, siteURL string, userCount int64) (bool, error) {
ret := _m.Called(_a0, installationID, customerID, subscriptionID, siteURL, userCount)
var r0 bool
if rf, ok := ret.Get(0).(func(string, string, string, string, string, int64) bool); ok {
r0 = rf(_a0, installationID, customerID, subscriptionID, siteURL, userCount)
} else {
r0 = ret.Get(0).(bool)
}
var r1 error
if rf, ok := ret.Get(1).(func(string, string, string, string, string, int64) error); ok {
r1 = rf(_a0, installationID, customerID, subscriptionID, siteURL, userCount)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// SendUpgradeEmail provides a mock function with given fields: user, _a1, locale, siteURL, action
func (_m *ServiceInterface) SendUpgradeEmail(user string, _a1 string, locale string, siteURL string, action string) (bool, error) {
ret := _m.Called(user, _a1, locale, siteURL, action)
var r0 bool
if rf, ok := ret.Get(0).(func(string, string, string, string, string) bool); ok {
r0 = rf(user, _a1, locale, siteURL, action)
} else {
r0 = ret.Get(0).(bool)
}
var r1 error
if rf, ok := ret.Get(1).(func(string, string, string, string, string) error); ok {
r1 = rf(user, _a1, locale, siteURL, action)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// SendUserAccessTokenAddedEmail provides a mock function with given fields: _a0, locale, siteURL
func (_m *ServiceInterface) SendUserAccessTokenAddedEmail(_a0 string, locale string, siteURL string) error {
ret := _m.Called(_a0, locale, siteURL)
var r0 error
if rf, ok := ret.Get(0).(func(string, string, string) error); ok {
r0 = rf(_a0, locale, siteURL)
} else {
r0 = ret.Error(0)
}
return r0
}
// SendVerifyEmail provides a mock function with given fields: userEmail, locale, siteURL, token, redirect
func (_m *ServiceInterface) SendVerifyEmail(userEmail string, locale string, siteURL string, token string, redirect string) error {
ret := _m.Called(userEmail, locale, siteURL, token, redirect)
var r0 error
if rf, ok := ret.Get(0).(func(string, string, string, string, string) error); ok {
r0 = rf(userEmail, locale, siteURL, token, redirect)
} else {
r0 = ret.Error(0)
}
return r0
}
// SendWelcomeEmail provides a mock function with given fields: userID, _a1, verified, disableWelcomeEmail, locale, siteURL, redirect
func (_m *ServiceInterface) SendWelcomeEmail(userID string, _a1 string, verified bool, disableWelcomeEmail bool, locale string, siteURL string, redirect string) error {
ret := _m.Called(userID, _a1, verified, disableWelcomeEmail, locale, siteURL, redirect)
var r0 error
if rf, ok := ret.Get(0).(func(string, string, bool, bool, string, string, string) error); ok {
r0 = rf(userID, _a1, verified, disableWelcomeEmail, locale, siteURL, redirect)
} else {
r0 = ret.Error(0)
}
return r0
} | r0 = rf(to, subject, htmlBody)
} else {
r0 = ret.Error(0)
} |
transaction.rs | use crate::address::Address;
use crate::error::Error;
use crate::opcodes::GTXCOST;
use crate::opcodes::GTXDATANONZERO;
use crate::opcodes::GTXDATAZERO;
use crate::private_key::PrivateKey;
use crate::rlp::AddressDef;
use crate::signature::Signature;
use crate::types::BigEndianInt;
use crate::u256;
use crate::utils::bytes_to_hex_str;
use crate::Uint256;
use serde::Serialize;
use serde::Serializer;
use serde_bytes::{ByteBuf, Bytes};
use serde_rlp::de::from_bytes;
use serde_rlp::ser::to_bytes;
use sha3::{Digest, Keccak256};
use std::fmt;
use std::fmt::Display;
/// Transaction as explained in the Ethereum Yellow paper section 4.2
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct | {
pub nonce: Uint256,
pub gas_price: Uint256,
pub gas_limit: Uint256,
pub to: Address,
pub value: Uint256,
pub data: Vec<u8>,
pub signature: Option<Signature>,
}
impl Display for Transaction {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(
f,
"0x{}",
bytes_to_hex_str(&self.to_bytes().unwrap_or_default())
)
}
}
impl fmt::LowerHex for Transaction {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
if f.alternate() {
write!(
f,
"0x{}",
bytes_to_hex_str(&self.to_bytes().unwrap_or_default()).to_lowercase()
)
} else {
write!(
f,
"{}",
bytes_to_hex_str(&self.to_bytes().unwrap_or_default()).to_lowercase()
)
}
}
}
impl fmt::UpperHex for Transaction {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
if f.alternate() {
write!(
f,
"0x{}",
bytes_to_hex_str(&self.to_bytes().unwrap_or_default()).to_uppercase()
)
} else {
write!(
f,
"{}",
bytes_to_hex_str(&self.to_bytes().unwrap_or_default()).to_uppercase()
)
}
}
}
impl Serialize for Transaction {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
// Serialization of a transaction without signature serializes
// the data assuming the "vrs" params are set to 0.
let sig = self.signature.unwrap_or_default();
let data = (
&BigEndianInt(self.nonce),
&BigEndianInt(self.gas_price),
&BigEndianInt(self.gas_limit),
&AddressDef(&self.to),
&BigEndianInt(self.value),
&ByteBuf::from(self.data.clone()),
&BigEndianInt(sig.v),
&BigEndianInt(sig.r),
&BigEndianInt(sig.s),
);
data.serialize(serializer)
}
}
/// Naive byte-count helper. It is slower than the `bytecount` crate, but it is only used by
/// this single intrinsic gas function. The `u32` counter also caps it at about 4GB of data,
/// which is a reasonable limit for a single transaction.
fn naive_count_32(haystack: &[u8], needle: u8) -> u32 {
haystack.iter().fold(0, |n, c| n + (*c == needle) as u32)
}
impl Transaction {
pub fn is_valid(&self) -> bool {
// it is not possible for `Uint256` to go above 2**256 - 1
// invalid signature check
if let Some(sig) = self.signature {
if !sig.is_valid() {
return false;
}
}
// TODO check that the signature is actually correct, not just valid
// rudimentary gas limit check, needs opcode awareness
if self.gas_limit < self.intrinsic_gas_used() {
return false;
}
true
}
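    /// Intrinsic gas is the flat `GTXCOST` charge plus a per-byte charge over the call data:
    /// `GTXDATAZERO` for every zero byte and `GTXDATANONZERO` for every non-zero byte.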
pub fn intrinsic_gas_used(&self) -> Uint256 {
let num_zero_bytes = naive_count_32(&self.data, 0u8);
let num_non_zero_bytes = self.data.len() as u32 - num_zero_bytes;
// this cannot overflow, should use at most 66 sig bits
Uint256::from_u32(GTXCOST)
.wrapping_add(
Uint256::from_u32(GTXDATAZERO).wrapping_mul(Uint256::from_u32(num_zero_bytes)),
)
.wrapping_add(
Uint256::from_u32(GTXDATANONZERO)
.wrapping_mul(Uint256::from_u32(num_non_zero_bytes)),
)
}
    /// Creates the raw RLP data without the signature params
fn to_unsigned_tx_params(&self) -> Vec<u8> {
// TODO: Could be refactored in a better way somehow
let data = (
&BigEndianInt(self.nonce),
&BigEndianInt(self.gas_price),
&BigEndianInt(self.gas_limit),
&AddressDef(&self.to),
&BigEndianInt(self.value),
&ByteBuf::from(self.data.clone()),
);
to_bytes(&data).unwrap()
}
fn to_unsigned_tx_params_for_network(&self, network_id: Uint256) -> Vec<u8> {
// assert!(self.signature.is_none());
// TODO: Could be refactored in a better way somehow
let data = (
&BigEndianInt(self.nonce),
&BigEndianInt(self.gas_price),
&BigEndianInt(self.gas_limit),
&AddressDef(&self.to),
&BigEndianInt(self.value),
&ByteBuf::from(self.data.clone()),
&BigEndianInt(network_id),
&ByteBuf::new(),
&ByteBuf::new(),
);
to_bytes(&data).unwrap()
}
    /// Creates a new, signed copy of this transaction using the given key and optional EIP-155 network id
#[must_use]
pub fn sign(&self, key: &PrivateKey, network_id: Option<u64>) -> Transaction {
        // Prepare the raw RLP data, matching on whether a network_id was supplied.
let rlpdata = match network_id {
Some(network_id) => {
assert!((1..9_223_372_036_854_775_790u64).contains(&network_id)); // 1 <= id < 2**63 - 18
self.to_unsigned_tx_params_for_network(Uint256::from_u64(network_id))
}
None => self.to_unsigned_tx_params(),
};
// Prepare a raw hash of RLP encoded TX params
let rawhash = Keccak256::digest(&rlpdata);
let mut sig = key.sign_hash(&rawhash);
if let Some(network_id) = network_id {
// Account v for the network_id value
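            // (EIP-155: the adjusted v equals the signer's 27/28 value plus 8 plus twice the
            // network id, i.e. recovery_id + 35 + 2 * network_id.)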
sig.v = sig
.v
.checked_add(u256!(8))
.unwrap()
.checked_add(Uint256::from_u64(network_id).shl1().unwrap())
.unwrap();
}
let mut tx = self.clone();
tx.signature = Some(sig);
tx
}
    /// Gets the sender's `Address`, derived from the `signature` field. Unlike the usual
    /// convention of returning `constants::NULL_ADDRESS`, this returns an error if the
    /// signature is missing or invalid; insert the null address yourself after matching on
    /// the error if you need that behaviour.
pub fn sender(&self) -> Result<Address, Error> {
if self.signature.is_none() {
return Err(Error::NoSignature);
}
let sig = self.signature.as_ref().unwrap();
if !sig.is_valid() {
Err(Error::InvalidSignatureValues)
} else {
let sighash = if sig.v == u256!(27) || sig.v == u256!(28) {
Keccak256::digest(&self.to_unsigned_tx_params())
} else if sig.v >= u256!(37) {
let network_id = sig.network_id().ok_or(Error::InvalidNetworkId)?;
                // In this case the hash of the transaction is over the usual RLP parameters,
                // but the "VRS" params are swapped for [network_id, '', '']. See Appendix F (285).
let rlp_data = self.to_unsigned_tx_params_for_network(network_id);
Keccak256::digest(&rlp_data)
} else {
                // All other V values would be erroneous for our calculations
return Err(Error::InvalidV);
};
// Validate signatures
if !sig.is_valid() {
return Err(Error::InvalidSignatureValues);
}
sig.recover(&sighash)
}
}
    /// Creates a hash of the transaction over all TX attributes,
    /// including the signature (VRS) values whether a signature is present or not.
pub fn hash(&self) -> Vec<u8> {
Keccak256::digest(&to_bytes(&self).unwrap()).to_vec()
}
/// Creates a byte representation of this transaction
pub fn to_bytes(&self) -> Result<Vec<u8>, Error> {
to_bytes(&self).map_err(|_| Error::SerializeRlp)
}
    /// Creates a transaction from raw RLP bytes; cannot decode unsigned transactions
pub fn decode_from_rlp(raw_rlp_bytes: &[u8]) -> Result<Self, Error> {
        // Try to decode the bytes into a Vec of Bytes, which enforces the structure of an n-element vector of byte arrays.
let data: Vec<&Bytes> = match from_bytes(raw_rlp_bytes) {
Ok(data) => data,
Err(_) => {
return Err(Error::DeserializeRlp);
}
};
// A valid decoded transaction has exactly 9 elements.
if data.len() != 9 {
return Err(Error::DeserializeRlp);
}
Ok(Transaction {
nonce: Uint256::from_bytes(data[0]).ok_or(Error::DeserializeRlp)?,
gas_price: Uint256::from_bytes(data[1]).ok_or(Error::DeserializeRlp)?,
gas_limit: Uint256::from_bytes(data[2]).ok_or(Error::DeserializeRlp)?,
to: Address::from_slice(&*data[3]).unwrap_or_default(),
value: Uint256::from_bytes(data[4]).ok_or(Error::DeserializeRlp)?,
data: (**data[5]).into(),
signature: Some(Signature::new(
Uint256::from_bytes(data[6]).ok_or(Error::DeserializeRlp)?,
Uint256::from_bytes(data[7]).ok_or(Error::DeserializeRlp)?,
Uint256::from_bytes(data[8]).ok_or(Error::DeserializeRlp)?,
)),
})
}
}
#[test]
fn test_vitaliks_eip_158_vitalik_12_json() {
use crate::utils::{bytes_to_hex_str, hex_str_to_bytes};
use serde_rlp::ser::to_bytes;
// https://github.com/ethereum/tests/blob/69f55e8608126e6470c2888a5b344c93c1550f40/TransactionTests/ttEip155VitaliksEip158/Vitalik_12.json
let tx = Transaction {
nonce: u256!(0xe),
gas_price: u256!(0),
gas_limit: u256!(0x493e0),
to: Address::default(), // "" - zeros only
value: u256!(0),
data: hex_str_to_bytes("60f2ff61000080610011600039610011565b6000f3").unwrap(),
signature: Some(Signature::new(
u256!(0x1c),
u256!(0xa310f4d0b26207db76ba4e1e6e7cf1857ee3aa8559bcbc399a6b09bfea2d30b4),
u256!(0x6dff38c645a1486651a717ddf3daccb4fd9a630871ecea0758ddfcf2774f9bc6),
)),
};
let lhs = to_bytes(&tx).unwrap();
let lhs = bytes_to_hex_str(&lhs);
let rhs = "f8610e80830493e080809560f2ff61000080610011600039610011565b6000f31ca0a310f4d0b26207db76ba4e1e6e7cf1857ee3aa8559bcbc399a6b09bfea2d30b4a06dff38c645a1486651a717ddf3daccb4fd9a630871ecea0758ddfcf2774f9bc6".to_owned();
assert_eq!(lhs, rhs);
assert_eq!(
bytes_to_hex_str(tx.sender().unwrap().as_bytes()),
"874b54a8bd152966d63f706bae1ffeb0411921e5"
);
}
#[test]
fn test_vitaliks_eip_158_vitalik_1_json() {
use crate::utils::bytes_to_hex_str;
use serde_rlp::ser::to_bytes;
// https://github.com/ethereum/tests/blob/69f55e8608126e6470c2888a5b344c93c1550f40/TransactionTests/ttEip155VitaliksEip158/Vitalik_12.json
let tx = Transaction {
nonce: u256!(0),
gas_price: u256!(0x4a817c800),
gas_limit: u256!(0x5208),
to: "3535353535353535353535353535353535353535".parse().unwrap(),
value: u256!(0),
data: Vec::new(),
signature: Some(Signature::new(
u256!(0x25),
u256!(0x44852b2a670ade5407e78fb2863c51de9fcb96542a07186fe3aeda6bb8a116d),
u256!(0x44852b2a670ade5407e78fb2863c51de9fcb96542a07186fe3aeda6bb8a116d),
)),
};
let lhs = to_bytes(&tx).unwrap();
let lhs = bytes_to_hex_str(&lhs);
let rhs = "f864808504a817c800825208943535353535353535353535353535353535353535808025a0044852b2a670ade5407e78fb2863c51de9fcb96542a07186fe3aeda6bb8a116da0044852b2a670ade5407e78fb2863c51de9fcb96542a07186fe3aeda6bb8a116d".to_owned();
assert_eq!(lhs, rhs);
}
#[test]
fn test_basictests_txtest_1() {
use crate::utils::bytes_to_hex_str;
use serde_rlp::ser::to_bytes;
// https://github.com/ethereum/tests/blob/b44cea1cccf1e4b63a05d1ca9f70f2063f28da6d/BasicTests/txtest.json
let tx = Transaction {
nonce: u256!(0),
gas_price: u256!(1000000000000),
gas_limit: u256!(10000),
to: "13978aee95f38490e9769c39b2773ed763d9cd5f".parse().unwrap(),
value: u256!(10000000000000000),
data: Vec::new(),
signature: None,
};
// Unsigned
let lhs = to_bytes(&tx).unwrap();
let lhs = bytes_to_hex_str(&lhs);
let rhs =
"eb8085e8d4a510008227109413978aee95f38490e9769c39b2773ed763d9cd5f872386f26fc1000080808080"
.to_owned();
assert_eq!(lhs, rhs);
// Signed
let key: PrivateKey = "c85ef7d79691fe79573b1a7064c19c1a9819ebdbd1faaab1a8ec92344438aaf4"
.parse()
.unwrap();
let signed_tx = tx.sign(&key, None);
let lhs = to_bytes(&signed_tx).unwrap();
let lhs = bytes_to_hex_str(&lhs);
let rhs = "f86b8085e8d4a510008227109413978aee95f38490e9769c39b2773ed763d9cd5f872386f26fc10000801ba0eab47c1a49bf2fe5d40e01d313900e19ca485867d462fe06e139e3a536c6d4f4a014a569d327dcda4b29f74f93c0e9729d2f49ad726e703f9cd90dbb0fbf6649f1".to_owned();
assert_eq!(lhs, rhs);
}
#[test]
fn test_basictests_txtest_2() {
use crate::utils::{bytes_to_hex_str, hex_str_to_bytes};
use serde_rlp::ser::to_bytes;
// https://github.com/ethereum/tests/blob/b44cea1cccf1e4b63a05d1ca9f70f2063f28da6d/BasicTests/txtest.json
let tx = Transaction {
nonce: u256!(0),
gas_price: u256!(1000000000000),
gas_limit: u256!(10000),
to: Address::default(),
value: u256!(0),
data: hex_str_to_bytes("6025515b525b600a37f260003556601b596020356000355760015b525b54602052f260255860005b525b54602052f2").unwrap(),
signature: None
};
// Unsigned
let lhs = to_bytes(&tx).unwrap();
let lhs = bytes_to_hex_str(&lhs);
let rhs = "f83f8085e8d4a510008227108080af6025515b525b600a37f260003556601b596020356000355760015b525b54602052f260255860005b525b54602052f2808080".to_owned();
assert_eq!(lhs, rhs);
// Signed
let key: PrivateKey = "c87f65ff3f271bf5dc8643484f66b200109caffe4bf98c4cb393dc35740b28c0"
.parse()
.unwrap();
let signed_tx = tx.sign(&key, None);
let lhs = to_bytes(&signed_tx).unwrap();
let lhs = bytes_to_hex_str(&lhs);
// This value is wrong
let rhs = "f87f8085e8d4a510008227108080af6025515b525b600a37f260003556601b596020356000355760015b525b54602052f260255860005b525b54602052f21ca05afed0244d0da90b67cf8979b0f246432a5112c0d31e8d5eedd2bc17b171c694a044efca37cb9883d1ee7a47236f3592df152931a930566933de2dc6e341c11426".to_owned();
assert_eq!(lhs, rhs);
}
| Transaction |
xmla_store.js | /**
* DevExtreme (ui/pivot_grid/xmla_store/xmla_store.js)
* Version: 19.1.5
* Build date: Tue Jul 30 2019
*
* Copyright (c) 2012 - 2019 Developer Express Inc. ALL RIGHTS RESERVED
* Read about DevExtreme licensing here: https://js.devexpress.com/Licensing/
*/
"use strict";
var _renderer = require("../../../core/renderer");
var _renderer2 = _interopRequireDefault(_renderer);
var _window = require("../../../core/utils/window");
var _class = require("../../../core/class");
var _class2 = _interopRequireDefault(_class);
var _string = require("../../../core/utils/string");
var _errors = require("../../../data/errors");
var _common = require("../../../core/utils/common");
var _extend = require("../../../core/utils/extend");
var _type = require("../../../core/utils/type");
var _iterator = require("../../../core/utils/iterator");
var _array = require("../../../core/utils/array");
var _uiPivot_grid = require("../ui.pivot_grid.utils");
var _deferred = require("../../../core/utils/deferred");
var _language_codes = require("../../../localization/language_codes");
function _interopRequireDefault(obj) {
return obj && obj.__esModule ? obj : {
"default": obj
}
}
function _toConsumableArray(arr) {
if (Array.isArray(arr)) {
for (var i = 0, arr2 = Array(arr.length); i < arr.length; i++) {
arr2[i] = arr[i]
}
return arr2
} else {
return Array.from(arr)
}
}
var window = (0, _window.getWindow)();
exports.XmlaStore = _class2.default.inherit(function() {
var discover = '<Envelope xmlns="http://schemas.xmlsoap.org/soap/envelope/"><Body><Discover xmlns="urn:schemas-microsoft-com:xml-analysis"><RequestType>{2}</RequestType><Restrictions><RestrictionList><CATALOG_NAME>{0}</CATALOG_NAME><CUBE_NAME>{1}</CUBE_NAME></RestrictionList></Restrictions><Properties><PropertyList><Catalog>{0}</Catalog>{3}</PropertyList></Properties></Discover></Body></Envelope>',
execute = '<Envelope xmlns="http://schemas.xmlsoap.org/soap/envelope/"><Body><Execute xmlns="urn:schemas-microsoft-com:xml-analysis"><Command><Statement>{0}</Statement></Command><Properties><PropertyList><Catalog>{1}</Catalog><ShowHiddenCubes>True</ShowHiddenCubes><SspropInitAppName>Microsoft SQL Server Management Studio</SspropInitAppName><Timeout>3600</Timeout>{2}</PropertyList></Properties></Execute></Body></Envelope>',
mdx = "SELECT {2} FROM {0} {1} CELL PROPERTIES VALUE, FORMAT_STRING, LANGUAGE, BACK_COLOR, FORE_COLOR, FONT_FLAGS",
mdxFilterSelect = "(SELECT {0} FROM {1})",
mdxSubset = "Subset({0}, {1}, {2})",
mdxOrder = "Order({0}, {1}, {2})",
mdxWith = "{0} {1} as {2}",
mdxSlice = "WHERE ({0})",
mdxNonEmpty = "NonEmpty({0}, {1})",
mdxAxis = "{0} DIMENSION PROPERTIES PARENT_UNIQUE_NAME,HIERARCHY_UNIQUE_NAME, MEMBER_VALUE ON {1}",
mdxCrossJoin = "CrossJoin({0})",
mdxSet = "{{0}}",
MEASURE_DEMENSION_KEY = "DX_MEASURES",
MD_DIMTYPE_MEASURE = "2";
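    // For reference: generateMdxCore below fills the "mdx" template with {0} = the FROM
    // clause (the cube, possibly wrapped in filter sub-selects), {1} = an optional WHERE
    // slice and {2} = the axis definitions.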
function execXMLA(requestOptions, data) {
var deferred = new _deferred.Deferred,
beforeSend = requestOptions.beforeSend,
ajaxSettings = {
url: requestOptions.url,
dataType: "text",
data: data,
headers: {
"Content-Type": "text/xml"
},
xhrFields: {},
method: "POST"
};
if ((0, _type.isFunction)(beforeSend)) {
beforeSend(ajaxSettings)
}(0, _uiPivot_grid.sendRequest)(ajaxSettings).fail(function() {
deferred.reject(arguments)
}).done(function(text) {
var parser = new window.DOMParser;
var xml;
try {
try {
xml = parser.parseFromString(text, "text/xml")
} catch (e) {
xml = void 0
}
if (!xml || xml.getElementsByTagName("parsererror").length || 0 === xml.childNodes.length) {
throw new _errors.errors.Error("E4023", text)
}
} catch (e) {
deferred.reject({
statusText: e.message,
stack: e.stack,
responseText: text
})
}
deferred.resolve(xml)
});
return deferred
}
function getLocaleIdProperty() {
var languageId = (0, _language_codes.getLanguageId)();
if (void 0 !== languageId) {
return (0, _string.format)("<LocaleIdentifier>{0}</LocaleIdentifier>", languageId)
}
return ""
}
function mdxDescendants(level, levelMember, nextLevel) {
var memberExpression = levelMember ? levelMember : level;
return "Descendants({" + memberExpression + "}, " + nextLevel + ", SELF_AND_BEFORE)"
}
function getAllMember(dimension) {
return (dimension.hierarchyName || dimension.dataField) + ".[All]"
}
function getAllMembers(field) {
var result = field.dataField + ".allMembers",
searchValue = field.searchValue;
if (searchValue) {
searchValue = searchValue.replace(/'/g, "''");
result = "Filter(" + result + ", instr(" + field.dataField + ".currentmember.member_caption,'" + searchValue + "') > 0)"
}
return result
}
function crossJoinElements(elements) {
var elementsString = elements.join(",");
return elements.length > 1 ? (0, _string.format)(mdxCrossJoin, elementsString) : elementsString
}
function union(elements) {
var elementsString = elements.join(",");
return elements.length > 1 ? "Union(" + elementsString + ")" : elementsString
}
function generateCrossJoin(path, expandLevel, expandAllCount, expandIndex, slicePath, options, axisName, take) {
var dataField, allMember, hierarchyName, arg, prevDimension, prevHierarchyName, isLastDimensionInGroup, isFirstDimensionInGroup, expandAllIndex, field, member, i, crossJoinArgs = [],
dimensions = options[axisName],
fields = [];
for (i = expandIndex; i <= expandLevel; i++) {
field = dimensions[i];
dataField = field.dataField;
prevHierarchyName = dimensions[i - 1] && dimensions[i - 1].hierarchyName;
hierarchyName = field.hierarchyName;
isLastDimensionInGroup = !hierarchyName || !dimensions[i + 1] || dimensions[i + 1].hierarchyName !== hierarchyName;
expandAllIndex = path.length + expandAllCount + expandIndex;
arg = null;
fields.push(field);
if (i < path.length) {
if (isLastDimensionInGroup) {
arg = "(" + dataField + "." + preparePathValue(path[i], dataField) + ")"
}
} else {
if (i <= expandAllIndex) {
if (0 === i && 0 === expandAllCount) {
allMember = getAllMember(dimensions[expandIndex]);
if (!hierarchyName) {
arg = getAllMembers(dimensions[expandIndex])
} else {
arg = allMember + "," + dimensions[expandIndex].dataField
}
} else {
if (hierarchyName) {
member = preparePathValue(slicePath[slicePath.length - 1]);
if (isLastDimensionInGroup || i === expandAllIndex) {
if (prevHierarchyName === hierarchyName) {
if (slicePath.length) {
prevDimension = dimensions[slicePath.length - 1]
}
if (!prevDimension || prevDimension.hierarchyName !== hierarchyName) {
prevDimension = dimensions[i - 1];
member = ""
}
arg = mdxDescendants(prevDimension.dataField, member, dataField)
} else {
arg = getAllMembers(field)
}
}
} else {
arg = getAllMembers(field)
}
}
} else {
isFirstDimensionInGroup = !hierarchyName || prevHierarchyName !== hierarchyName;
if (isFirstDimensionInGroup) {
arg = "(" + getAllMember(field) + ")"
}
}
}
if (arg) {
arg = (0, _string.format)(mdxSet, arg);
if (take) {
var sortBy = (field.hierarchyName || field.dataField) + ("displayText" === field.sortBy ? ".MEMBER_CAPTION" : ".MEMBER_VALUE");
arg = (0, _string.format)(mdxOrder, arg, sortBy, "desc" === field.sortOrder ? "DESC" : "ASC")
}
crossJoinArgs.push(arg)
}
}
return crossJoinElements(crossJoinArgs)
}
function fillCrossJoins(crossJoins, path, expandLevel, expandIndex, slicePath, options, axisName, cellsString, take, totalsOnly) {
var dimensionIndex, expandAllCount = -1,
dimensions = options[axisName];
do {
expandAllCount++;
dimensionIndex = path.length + expandAllCount + expandIndex;
var crossJoin = generateCrossJoin(path, expandLevel, expandAllCount, expandIndex, slicePath, options, axisName, take);
if (!take && !totalsOnly) {
crossJoin = (0, _string.format)(mdxNonEmpty, crossJoin, cellsString)
}
crossJoins.push(crossJoin)
} while (dimensions[dimensionIndex] && dimensions[dimensionIndex + 1] && dimensions[dimensionIndex].expanded)
}
function declare(expression, withArray, name, type) {
name = name || "[DX_Set_" + withArray.length + "]";
type = type || "set";
withArray.push((0, _string.format)(mdxWith, type, name, expression));
return name
}
function generateAxisMdx(options, axisName, cells, withArray, parseOptions) {
var dimensions = options[axisName],
crossJoins = [],
path = [],
expandedPaths = [],
expandIndex = 0,
expandLevel = 0,
result = [],
cellsString = (0, _string.format)(mdxSet, cells.join(","));
if (dimensions && dimensions.length) {
if (options.headerName === axisName) {
path = options.path;
expandIndex = path.length
} else {
if (options.headerName && options.oppositePath) {
path = options.oppositePath;
expandIndex = path.length
} else {
expandedPaths = ("columns" === axisName ? options.columnExpandedPaths : options.rowExpandedPaths) || expandedPaths
}
}
expandLevel = (0, _uiPivot_grid.getExpandedLevel)(options, axisName);
fillCrossJoins(crossJoins, [], expandLevel, expandIndex, path, options, axisName, cellsString, "rows" === axisName ? options.rowTake : options.columnTake, options.totalsOnly);
(0, _iterator.each)(expandedPaths, function(_, expandedPath) {
fillCrossJoins(crossJoins, expandedPath, expandLevel, expandIndex, expandedPath, options, axisName, cellsString)
});
for (var i = expandLevel; i >= path.length; i--) {
if (dimensions[i].hierarchyName) {
parseOptions.visibleLevels[dimensions[i].hierarchyName] = parseOptions.visibleLevels[dimensions[i].hierarchyName] || [];
parseOptions.visibleLevels[dimensions[i].hierarchyName].push(dimensions[i].dataField)
}
}
}
if (crossJoins.length) {
var expression = union(crossJoins);
if ("rows" === axisName && options.rowTake) {
expression = (0, _string.format)(mdxSubset, expression, options.rowSkip > 0 ? options.rowSkip + 1 : 0, options.rowSkip > 0 ? options.rowTake : options.rowTake + 1)
}
if ("columns" === axisName && options.columnTake) {
expression = (0, _string.format)(mdxSubset, expression, options.columnSkip > 0 ? options.columnSkip + 1 : 0, options.columnSkip > 0 ? options.columnTake : options.columnTake + 1)
}
var axisSet = "[DX_" + axisName + "]";
result.push(declare(expression, withArray, axisSet));
if (options.totalsOnly) {
result.push(declare("COUNT(" + axisSet + ")", withArray, "[DX_" + axisName + "_count]", "member"))
}
}
if ("columns" === axisName && cells.length && !options.skipValues) {
result.push(cellsString)
}
return (0, _string.format)(mdxAxis, crossJoinElements(result), axisName)
}
function generateAxisFieldsFilter(fields) {
var filterMembers = [];
(0, _iterator.each)(fields, function(_, field) {
var filterStringExpression, dataField = field.dataField,
filterExpression = [],
filterValues = field.filterValues || [];
if (field.hierarchyName && (0, _type.isNumeric)(field.groupIndex)) {
return
}(0, _iterator.each)(filterValues, function(_, filterValue) {
var filterMdx = dataField + "." + preparePathValue(Array.isArray(filterValue) ? filterValue[filterValue.length - 1] : filterValue, dataField);
if ("exclude" === field.filterType) {
filterExpression.push(filterMdx + ".parent");
filterMdx = "Descendants(" + filterMdx + ")"
}
filterExpression.push(filterMdx)
});
if (filterValues.length) {
filterStringExpression = (0, _string.format)(mdxSet, filterExpression.join(","));
if ("exclude" === field.filterType) {
filterStringExpression = "Except(" + getAllMembers(field) + "," + filterStringExpression + ")"
}
filterMembers.push(filterStringExpression)
}
});
return filterMembers.length ? crossJoinElements(filterMembers) : ""
}
function generateFrom(columnsFilter, rowsFilter, filter, cubeName) {
var from = "[" + cubeName + "]";
(0, _iterator.each)([columnsFilter, rowsFilter, filter], function(_, filter) {
if (filter) {
from = (0, _string.format)(mdxFilterSelect, filter + "on 0", from)
}
});
return from
}
function generateMdxCore(axisStrings, withArray, columns, rows, filters, slice, cubeName) {
var options = arguments.length > 7 && void 0 !== arguments[7] ? arguments[7] : {};
var mdxString = "",
withString = (withArray.length ? "with " + withArray.join(" ") : "") + " ";
if (axisStrings.length) {
var select = void 0;
if (options.totalsOnly) {
var countMembers = [];
if (rows.length) {
countMembers.push("[DX_rows_count]")
}
if (columns.length) {
countMembers.push("[DX_columns_count]")
}
select = "{" + countMembers.join(",") + "} on columns"
} else {
select = axisStrings.join(",")
}
mdxString = withString + (0, _string.format)(mdx, generateFrom(generateAxisFieldsFilter(columns), generateAxisFieldsFilter(rows), generateAxisFieldsFilter(filters || []), cubeName), slice.length ? (0, _string.format)(mdxSlice, slice.join(",")) : "", select)
}
return mdxString
}
function prepareDataFields(withArray, valueFields) {
return (0, _iterator.map)(valueFields, function(cell) {
if ((0, _type.isString)(cell.expression)) {
declare(cell.expression, withArray, cell.dataField, "member")
}
return cell.dataField
})
}
function addSlices(slices, options, headerName, path) {
(0, _iterator.each)(path, function(index, value) {
var dimension = options[headerName][index];
if (!dimension.hierarchyName || dimension.hierarchyName !== options[headerName][index + 1].hierarchyName) {
slices.push(dimension.dataField + "." + preparePathValue(value, dimension.dataField))
}
})
}
function generateMDX(options, cubeName, parseOptions) {
var columns = options.columns || [],
rows = options.rows || [],
values = options.values && options.values.length ? options.values : [{
dataField: "[Measures]"
}],
slice = [],
withArray = [],
axisStrings = [],
dataFields = prepareDataFields(withArray, values);
parseOptions.measureCount = options.skipValues ? 1 : values.length;
parseOptions.visibleLevels = {};
if (options.headerName && options.path) {
addSlices(slice, options, options.headerName, options.path)
}
if (options.headerName && options.oppositePath) {
addSlices(slice, options, "rows" === options.headerName ? "columns" : "rows", options.oppositePath)
}
if (columns.length || dataFields.length) {
axisStrings.push(generateAxisMdx(options, "columns", dataFields, withArray, parseOptions))
}
if (rows.length) {
axisStrings.push(generateAxisMdx(options, "rows", dataFields, withArray, parseOptions))
}
return generateMdxCore(axisStrings, withArray, columns, rows, options.filters, slice, cubeName, options)
}
function createDrillDownAxisSlice(slice, fields, path) {
(0, _iterator.each)(path, function(index, value) {
var field = fields[index];
if (field.hierarchyName && (fields[index + 1] || {}).hierarchyName === field.hierarchyName) {
return
}
slice.push(field.dataField + "." + preparePathValue(value, field.dataField))
})
}
function generateDrillDownMDX(options, cubeName, params) {
var coreMDX, columns = options.columns || [],
rows = options.rows || [],
values = options.values && options.values.length ? options.values : [{
dataField: "[Measures]"
}],
slice = [],
withArray = [],
axisStrings = [],
dataFields = prepareDataFields(withArray, values),
maxRowCount = params.maxRowCount,
customColumns = params.customColumns || [],
customColumnsString = customColumns.length > 0 ? " return " + customColumns.join(",") : "";
createDrillDownAxisSlice(slice, columns, params.columnPath || []);
createDrillDownAxisSlice(slice, rows, params.rowPath || []);
if (columns.length || columns.length || dataFields.length) {
axisStrings.push([(dataFields[params.dataIndex] || dataFields[0]) + " on 0"])
}
coreMDX = generateMdxCore(axisStrings, withArray, columns, rows, options.filters, slice, cubeName);
return coreMDX ? "drillthrough" + (maxRowCount > 0 ? " maxrows " + maxRowCount : "") + coreMDX + customColumnsString : coreMDX
}
function getNumber(str) {
return parseInt(str, 10)
}
function parseValue(valueText) {
return (0, _type.isNumeric)(valueText) ? parseFloat(valueText) : valueText
}
function getFirstChild(node, tagName) {
return (node.getElementsByTagName(tagName) || [])[0]
}
function getFirstChildText(node, childTagName) {
return getNodeText(getFirstChild(node, childTagName))
}
function parseAxes(xml, skipValues) {
var axes = [];
(0, _iterator.each)(xml.getElementsByTagName("Axis"), function(_, axisElement) {
var name = axisElement.getAttribute("name"),
axis = [],
index = 0;
if (0 === name.indexOf("Axis") && (0, _type.isNumeric)(getNumber(name.substr(4)))) {
axes.push(axis);
(0, _iterator.each)(axisElement.getElementsByTagName("Tuple"), function(_, tupleElement) {
var tuple, level, i, tupleMembers = tupleElement.childNodes,
levelSum = 0,
members = [],
membersCount = skipValues ? tupleMembers.length : tupleMembers.length - 1,
isAxisWithMeasure = 1 === axes.length;
if (isAxisWithMeasure) {
membersCount--
}
axis.push(members);
for (i = membersCount; i >= 0; i--) {
tuple = tupleMembers[i];
level = getNumber(getFirstChildText(tuple, "LNum"));
members[i] = {
caption: getFirstChildText(tuple, "Caption"),
value: parseValue(getFirstChildText(tuple, "MEMBER_VALUE")),
level: level,
index: index++,
hasValue: !levelSum && (!!level || 0 === i),
name: getFirstChildText(tuple, "UName"),
hierarchyName: tupleMembers[i].getAttribute("Hierarchy"),
parentName: getFirstChildText(tuple, "PARENT_UNIQUE_NAME"),
levelName: getFirstChildText(tuple, "LName")
};
levelSum += level
}
})
}
});
while (axes.length < 2) {
axes.push([
[{
level: 0
}]
])
}
return axes
}
function getNodeText(node) {
        return node && (node.textContent || node.text || node.innerHTML) || ""
}
function parseCells(xml, axes, measureCount) {
var measureIndex, row, cells = [],
cell = [],
index = 0,
cellsOriginal = [],
cellElements = xml.getElementsByTagName("Cell"),
errorDictionary = {};
for (var i = 0; i < cellElements.length; i++) {
var xmlCell = cellElements[i],
valueElement = xmlCell.getElementsByTagName("Value")[0],
errorElements = valueElement && valueElement.getElementsByTagName("Error") || [],
text = 0 === errorElements.length ? getNodeText(valueElement) : "#N/A",
value = parseFloat(text),
isNumeric = text - value + 1 > 0,
cellOrdinal = getNumber(xmlCell.getAttribute("CellOrdinal"));
if (errorElements.length) {
errorDictionary[getNodeText(errorElements[0].getElementsByTagName("ErrorCode")[0])] = getNodeText(errorElements[0].getElementsByTagName("Description")[0])
}
cellsOriginal[cellOrdinal] = {
value: isNumeric ? value : text || null
}
}(0, _iterator.each)(axes[1], function() {
row = [];
cells.push(row);
(0, _iterator.each)(axes[0], function() {
measureIndex = index % measureCount;
if (0 === measureIndex) {
cell = [];
row.push(cell)
}
cell.push(cellsOriginal[index] ? cellsOriginal[index].value : null);
index++
})
});
Object.keys(errorDictionary).forEach(function(key) {
_errors.errors.log("W4002", errorDictionary[key])
});
return cells
}
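    // For reference: preparePathValue wraps plain values in brackets ("2019" -> "[2019]"),
    // leaves values that already contain "&" untouched, and strips a leading "<dataField>."
    // prefix, e.g. ("[Date].[Year].&[2019]", "[Date].[Year]") -> "&[2019]".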
function preparePathValue(pathValue, dataField) {
if (pathValue) {
pathValue = (0, _type.isString)(pathValue) && pathValue.indexOf("&") !== -1 ? pathValue : "[" + pathValue + "]";
if (dataField && 0 === pathValue.indexOf(dataField + ".")) {
pathValue = pathValue.slice(dataField.length + 1, pathValue.length)
}
}
return pathValue
}
function getItem(hash, name, member, index) {
var item = hash[name];
if (!item) {
item = {};
hash[name] = item
}
if (!(0, _type.isDefined)(item.value) && member) {
item.text = member.caption;
item.value = member.value;
item.key = name ? name : "";
item.levelName = member.levelName;
item.hierarchyName = member.hierarchyName;
item.parentName = member.parentName;
item.index = index;
item.level = member.level
}
return item
}
function getVisibleChildren(item, visibleLevels) {
var result = [],
children = item.children && (item.children.length ? item.children : Object.keys(item.children.grandTotalHash || {}).reduce(function(result, name) {
return result.concat(item.children.grandTotalHash[name].children)
}, [])),
firstChild = children && children[0];
if (firstChild && (visibleLevels[firstChild.hierarchyName] && (0, _array.inArray)(firstChild.levelName, visibleLevels[firstChild.hierarchyName]) !== -1 || !visibleLevels[firstChild.hierarchyName] || 0 === firstChild.level)) {
var newChildren = children.filter(function(child) {
return child.hierarchyName === firstChild.hierarchyName
});
newChildren.grandTotalHash = children.grandTotalHash;
return newChildren
} else {
if (firstChild) {
for (var i = 0; i < children.length; i++) {
if (children[i].hierarchyName === firstChild.hierarchyName) {
result.push.apply(result, getVisibleChildren(children[i], visibleLevels))
}
}
}
}
return result
}
function processMember(dataIndex, member, parentItem) {
var currentItem, children = parentItem.children = parentItem.children || [],
hash = children.hash = children.hash || {},
grandTotalHash = children.grandTotalHash = children.grandTotalHash || {};
if (member.parentName) {
parentItem = getItem(hash, member.parentName);
children = parentItem.children = parentItem.children || []
}
currentItem = getItem(hash, member.name, member, dataIndex);
if (member.hasValue && !currentItem.added) {
currentItem.index = dataIndex;
currentItem.added = true;
children.push(currentItem)
}
if ((!parentItem.value || !parentItem.parentName) && member.parentName) {
grandTotalHash[member.parentName] = parentItem
} else {
if (grandTotalHash[parentItem.name]) {
delete grandTotalHash[member.parentName]
}
}
return currentItem
}
function getGrandTotalIndex(parentItem, visibleLevels) {
var grandTotalIndex;
if (1 === parentItem.children.length && "" === parentItem.children[0].parentName) {
grandTotalIndex = parentItem.children[0].index;
var grandTotalHash = parentItem.children.grandTotalHash;
parentItem.children = parentItem.children[0].children || [];
parentItem.children.grandTotalHash = grandTotalHash;
parentItem.children = getVisibleChildren(parentItem, visibleLevels)
} else {
if (0 === parentItem.children.length) {
grandTotalIndex = 0
}
}
return grandTotalIndex
}
function fillDataSourceAxes(dataSourceAxis, axisTuples, measureCount, visibleLevels) {
var grandTotalIndex, result = [];
(0, _iterator.each)(axisTuples, function(tupleIndex, members) {
var parentItem = {
children: result
},
dataIndex = (0, _type.isDefined)(measureCount) ? Math.floor(tupleIndex / measureCount) : tupleIndex;
(0, _iterator.each)(members, function(_, member) {
parentItem = processMember(dataIndex, member, parentItem)
})
});
var parentItem = {
children: result
};
parentItem.children = getVisibleChildren(parentItem, visibleLevels);
grandTotalIndex = getGrandTotalIndex(parentItem, visibleLevels);
(0, _uiPivot_grid.foreachTree)(parentItem.children, function(items) {
var item = items[0],
children = getVisibleChildren(item, visibleLevels);
if (children.length) {
item.children = children
} else {
delete item.children
}
delete item.levelName;
delete item.hierarchyName;
delete item.added;
delete item.parentName;
delete item.level
}, true);
(0, _iterator.each)(parentItem.children || [], function(_, e) {
dataSourceAxis.push(e)
});
return grandTotalIndex
}
function checkError(xml) {
var description, error, faultElementNS = xml.getElementsByTagName("soap:Fault"),
faultElement = xml.getElementsByTagName("Fault"),
errorElement = (0, _renderer2.default)([].slice.call(faultElement.length ? faultElement : faultElementNS)).find("Error");
if (errorElement.length) {
description = errorElement.attr("Description");
error = new _errors.errors.Error("E4000", description);
_errors.errors.log("E4000", description);
return error
}
return null
}
function parseResult(xml, parseOptions) {
var axes, dataSource = {
columns: [],
rows: []
},
measureCount = parseOptions.measureCount;
axes = parseAxes(xml, parseOptions.skipValues);
dataSource.grandTotalColumnIndex = fillDataSourceAxes(dataSource.columns, axes[0], measureCount, parseOptions.visibleLevels);
dataSource.grandTotalRowIndex = fillDataSourceAxes(dataSource.rows, axes[1], void 0, parseOptions.visibleLevels);
dataSource.values = parseCells(xml, axes, measureCount);
return dataSource
}
function parseDiscoverRowSet(xml, schema, dimensions, translatedDisplayFolders) {
var result = [],
isMeasure = "MEASURE" === schema,
displayFolderField = isMeasure ? "MEASUREGROUP_NAME" : schema + "_DISPLAY_FOLDER";
(0, _iterator.each)(xml.getElementsByTagName("row"), function(_, row) {
var hierarchyName = "LEVEL" === schema ? getFirstChildText(row, "HIERARCHY_UNIQUE_NAME") : void 0,
levelNumber = getFirstChildText(row, "LEVEL_NUMBER"),
displayFolder = getFirstChildText(row, displayFolderField);
if (isMeasure) {
displayFolder = translatedDisplayFolders[displayFolder] || displayFolder
}
if (("0" !== levelNumber || "true" !== getFirstChildText(row, schema + "_IS_VISIBLE")) && getFirstChildText(row, "DIMENSION_TYPE") !== MD_DIMTYPE_MEASURE) {
var dimension = isMeasure ? MEASURE_DEMENSION_KEY : getFirstChildText(row, "DIMENSION_UNIQUE_NAME"),
dataField = getFirstChildText(row, schema + "_UNIQUE_NAME");
result.push({
dimension: dimensions.names[dimension] || dimension,
groupIndex: levelNumber ? getNumber(levelNumber) - 1 : void 0,
dataField: dataField,
caption: getFirstChildText(row, schema + "_CAPTION"),
hierarchyName: hierarchyName,
groupName: hierarchyName,
displayFolder: displayFolder,
isMeasure: isMeasure,
isDefault: !!dimensions.defaultHierarchies[dataField]
})
}
});
return result
}
function parseMeasureGroupDiscoverRowSet(xml) {
var measureGroups = {};
(0, _iterator.each)(xml.getElementsByTagName("row"), function(_, row) {
measureGroups[getFirstChildText(row, "MEASUREGROUP_NAME")] = getFirstChildText(row, "MEASUREGROUP_CAPTION")
});
return measureGroups
}
function parseDimensionsDiscoverRowSet(xml) {
var result = {
names: {},
defaultHierarchies: {}
};
(0, _iterator.each)((0, _renderer2.default)(xml).find("row"), function() {
var $row = (0, _renderer2.default)(this),
type = $row.children("DIMENSION_TYPE").text(),
dimensionName = type === MD_DIMTYPE_MEASURE ? MEASURE_DEMENSION_KEY : $row.children("DIMENSION_UNIQUE_NAME").text();
result.names[dimensionName] = $row.children("DIMENSION_CAPTION").text();
result.defaultHierarchies[$row.children("DEFAULT_HIERARCHY").text()] = true
});
return result
}
function | (str) {
str = str.replace(/_x(....)_/g, function(whole, group1) {
return String.fromCharCode(parseInt(group1, 16))
});
var stringArray = str.match(/\[.+?\]/gi);
if (stringArray && stringArray.length) {
str = stringArray[stringArray.length - 1]
}
return str.replace(/\[/gi, "").replace(/\]/gi, "").replace(/\$/gi, "").replace(/\./gi, " ")
}
function parseDrillDownRowSet(xml) {
var rows = xml.getElementsByTagName("row"),
result = [],
columnNames = {};
for (var i = 0; i < rows.length; i++) {
var children = rows[i].childNodes,
item = {};
for (var j = 0; j < children.length; j++) {
var tagName = children[j].tagName,
name = columnNames[tagName] = columnNames[tagName] || parseStringWithUnicodeSymbols(tagName);
item[name] = getNodeText(children[j])
}
result.push(item)
}
return result
}
function sendQuery(storeOptions, mdxString) {
mdxString = (0, _renderer2.default)("<div>").text(mdxString).html();
return execXMLA(storeOptions, (0, _string.format)(execute, mdxString, storeOptions.catalog, getLocaleIdProperty()))
}
function processTotalCount(data, options, totalCountXml) {
var axes = [];
var columnOptions = options.columns || [];
var rowOptions = options.rows || [];
if (columnOptions.length) {
axes.push({})
}
if (rowOptions.length) {
axes.push({})
}
var cells = parseCells(totalCountXml, [
[{}],
[{}, {}]
], 1);
if (!columnOptions.length && rowOptions.length) {
data.rowCount = Math.max(cells[0][0][0] - 1, 0)
}
if (!rowOptions.length && columnOptions.length) {
data.columnCount = Math.max(cells[0][0][0] - 1, 0)
}
if (rowOptions.length && columnOptions.length) {
data.rowCount = Math.max(cells[0][0][0] - 1, 0);
data.columnCount = Math.max(cells[1][0][0] - 1, 0)
}
if (void 0 !== data.rowCount && options.rowTake) {
data.rows = [].concat(_toConsumableArray(Array(options.rowSkip))).concat(data.rows);
data.rows.length = data.rowCount;
for (var i = 0; i < data.rows.length; i++) {
data.rows[i] = data.rows[i] || {}
}
}
if (void 0 !== data.columnCount && options.columnTake) {
data.columns = [].concat(_toConsumableArray(Array(options.columnSkip))).concat(data.columns);
data.columns.length = data.columnCount;
for (var _i = 0; _i < data.columns.length; _i++) {
data.columns[_i] = data.columns[_i] || {}
}
}
}
return {
ctor: function(options) {
this._options = options
},
getFields: function() {
var options = this._options,
catalog = options.catalog,
cube = options.cube,
localeIdProperty = getLocaleIdProperty(),
dimensionsRequest = execXMLA(options, (0, _string.format)(discover, catalog, cube, "MDSCHEMA_DIMENSIONS", localeIdProperty)),
measuresRequest = execXMLA(options, (0, _string.format)(discover, catalog, cube, "MDSCHEMA_MEASURES", localeIdProperty)),
hierarchiesRequest = execXMLA(options, (0, _string.format)(discover, catalog, cube, "MDSCHEMA_HIERARCHIES", localeIdProperty)),
levelsRequest = execXMLA(options, (0, _string.format)(discover, catalog, cube, "MDSCHEMA_LEVELS", localeIdProperty)),
result = new _deferred.Deferred;
(0, _deferred.when)(dimensionsRequest, measuresRequest, hierarchiesRequest, levelsRequest).then(function(dimensionsResponse, measuresResponse, hierarchiesResponse, levelsResponse) {
execXMLA(options, (0, _string.format)(discover, catalog, cube, "MDSCHEMA_MEASUREGROUPS", localeIdProperty)).done(function(measureGroupsResponse) {
var dimensions = parseDimensionsDiscoverRowSet(dimensionsResponse),
hierarchies = parseDiscoverRowSet(hierarchiesResponse, "HIERARCHY", dimensions),
levels = parseDiscoverRowSet(levelsResponse, "LEVEL", dimensions),
measureGroups = parseMeasureGroupDiscoverRowSet(measureGroupsResponse),
fields = parseDiscoverRowSet(measuresResponse, "MEASURE", dimensions, measureGroups).concat(hierarchies),
levelsByHierarchy = {};
(0, _iterator.each)(levels, function(_, level) {
levelsByHierarchy[level.hierarchyName] = levelsByHierarchy[level.hierarchyName] || [];
levelsByHierarchy[level.hierarchyName].push(level)
});
(0, _iterator.each)(hierarchies, function(_, hierarchy) {
if (levelsByHierarchy[hierarchy.dataField] && levelsByHierarchy[hierarchy.dataField].length > 1) {
hierarchy.groupName = hierarchy.hierarchyName = hierarchy.dataField;
fields.push.apply(fields, levelsByHierarchy[hierarchy.hierarchyName])
}
});
result.resolve(fields)
}).fail(result.reject)
}).fail(result.reject);
return result
},
load: function load(options) {
var result = new _deferred.Deferred,
storeOptions = this._options,
parseOptions = {
skipValues: options.skipValues
},
mdxString = generateMDX(options, storeOptions.cube, parseOptions);
var rowCountMdx = void 0;
if (options.rowSkip || options.rowTake || options.columnTake || options.columnSkip) {
rowCountMdx = generateMDX((0, _extend.extend)({}, options, {
totalsOnly: true,
rowSkip: null,
rowTake: null,
columnSkip: null,
columnTake: null
}), storeOptions.cube, {})
}
var load = function() {
if (mdxString) {
(0, _deferred.when)(sendQuery(storeOptions, mdxString), rowCountMdx && sendQuery(storeOptions, rowCountMdx)).done(function(executeXml, rowCountXml) {
var error = checkError(executeXml) || rowCountXml && checkError(rowCountXml);
if (!error) {
var response = parseResult(executeXml, parseOptions);
if (rowCountXml) {
processTotalCount(response, options, rowCountXml)
}
result.resolve(response)
} else {
result.reject(error)
}
}).fail(result.reject)
} else {
result.resolve({
columns: [],
rows: [],
values: [],
grandTotalColumnIndex: 0,
grandTotalRowIndex: 0
})
}
};
if (options.delay) {
setTimeout(load, options.delay)
} else {
load()
}
return result
},
supportPaging: function() {
return true
},
getDrillDownItems: function(options, params) {
var result = new _deferred.Deferred,
storeOptions = this._options,
mdxString = generateDrillDownMDX(options, storeOptions.cube, params);
if (mdxString) {
(0, _deferred.when)(sendQuery(storeOptions, mdxString)).done(function(executeXml) {
var error = checkError(executeXml);
if (!error) {
result.resolve(parseDrillDownRowSet(executeXml))
} else {
result.reject(error)
}
}).fail(result.reject)
} else {
result.resolve([])
}
return result
},
key: _common.noop,
filter: _common.noop
}
}()).include(_uiPivot_grid.storeDrillDownMixin);
| parseStringWithUnicodeSymbols |
abstractProvisioner.py | # Copyright (C) 2015-2016 Regents of the University of California
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from builtins import object
import logging
from abc import ABCMeta, abstractmethod
from collections import namedtuple
from bd2k.util.retry import never
from future.utils import with_metaclass
log = logging.getLogger(__name__)
Shape = namedtuple("_Shape", "wallTime memory cores disk preemptable")
"""
Represents a job or a node's "shape", in terms of the dimensions of memory, cores, disk and
wall-time allocation.
The wallTime attribute stores the number of seconds of a node allocation, e.g. 3600 for AWS,
or 60 for Azure. FIXME: and for jobs?
The memory and disk attributes store the number of bytes required by a job (or provided by a
node) in RAM or on disk (SSD or HDD), respectively.
"""
class AbstractProvisioner(with_metaclass(ABCMeta, object)):
"""
An abstract base class to represent the interface for provisioning worker nodes to use in a
Toil cluster.
"""
def __init__(self, config=None):
"""
Initialize provisioner. If config and batchSystem are not specified, the
provisioner is being used to manage nodes without a workflow
:param config: Config from common.py
:param batchSystem: The batchSystem used during run
"""
self.config = config
self.stop = False
self.staticNodesDict = {} # dict with keys of nodes private IPs, val is nodeInfo
self.static = {}
def getStaticNodes(self, preemptable):
return self.static[preemptable]
@staticmethod
def retryPredicate(e):
"""
Return true if the exception e should be retried by the cluster scaler.
For example, should return true if the exception was due to exceeding an API rate limit.
The error will be retried with exponential backoff.
:param e: exception raised during execution of setNodeCount
:return: boolean indicating whether the exception e should be retried
"""
return never(e)
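    # A concrete provisioner might, for example, retry on provider rate-limit
    # errors. Illustrative sketch only; BotoServerError and the 503 check are
    # assumptions, not part of this file:
    #   @staticmethod
    #   def retryPredicate(e):
    #       return isinstance(e, BotoServerError) and e.status == 503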
def setStaticNodes(self, nodes, preemptable):
"""
Used to track statically provisioned nodes. These nodes are
treated differently than autoscaled nodes in that they should not
be automatically terminated.
:param nodes: list of Node objects
"""
prefix = 'non-' if not preemptable else ''
log.debug("Adding %s to %spreemptable static nodes", nodes, prefix)
if nodes is not None:
self.static[preemptable] = {node.privateIP : node for node in nodes}
@abstractmethod
def addNodes(self, nodeType, numNodes, preemptable):
"""
Used to add worker nodes to the cluster
:param numNodes: The number of nodes to add
:param preemptable: whether or not the nodes will be preemptable
:return: number of nodes successfully added
"""
raise NotImplementedError
@abstractmethod
def terminateNodes(self, nodes):
"""
Terminate the nodes represented by given Node objects
:param nodes: list of Node objects
"""
raise NotImplementedError
@abstractmethod
def getProvisionedWorkers(self, nodeType, preemptable):
"""
Gets all nodes of the given preemptability from the provisioner.
Includes both static and autoscaled nodes.
:param preemptable: Boolean value indicating whether to return preemptable nodes or
non-preemptable nodes
:return: list of Node objects
"""
raise NotImplementedError
@abstractmethod
def remainingBillingInterval(self, node):
"""
Calculate how much of a node's allocated billing interval is
left in this cycle.
:param node: Node object
:return: float from 0 -> 1.0 representing percentage of pre-paid time left in cycle
"""
raise NotImplementedError
@abstractmethod
def getNodeShape(self, nodeType=None, preemptable=False):
"""
The shape of a preemptable or non-preemptable node managed by this provisioner. The node
shape defines key properties of a machine, such as its number of cores or the time
between billing intervals.
:param str nodeType: Node type name to return the shape of.
:rtype: Shape
"""
raise NotImplementedError
@classmethod
@abstractmethod
def rsyncLeader(cls, clusterName, args, **kwargs):
"""
Rsyncs to the leader of the cluster with the specified name. The arguments are passed directly to
Rsync.
:param clusterName: name of the cluster to target
:param args: list of string arguments to rsync. Identical to the normal arguments to rsync, but the
host name of the remote host can be omitted. ex) ['/localfile', ':/remotedest']
:param \**kwargs:
See below
:Keyword Arguments:
* *strict*: if False, strict host key checking is disabled. (Enabled by default.)
"""
raise NotImplementedError
@classmethod
@abstractmethod
def launchCluster(cls, instanceType, keyName, clusterName, spotBid=None):
"""
Launches a cluster with the specified instance type for the leader with the specified name.
:param instanceType: desired type of the leader instance
:param keyName: name of the ssh key pair to launch the instance with
:param clusterName: desired identifier of the cluster
:param spotBid: how much to bid for the leader instance. If none, use on demand pricing.
:return:
"""
raise NotImplementedError
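    # Illustrative call against a hypothetical concrete subclass (the class name
    # and argument values are assumptions, not part of this file):
    #   MyProvisioner.launchCluster(instanceType='t2.medium',
    #                               keyName='my-ssh-key',
    #                               clusterName='demo-cluster')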
@classmethod
@abstractmethod
def sshLeader(cls, clusterName, args, **kwargs):
|
@classmethod
@abstractmethod
def destroyCluster(cls, clusterName):
"""
Terminates all nodes in the specified cluster and cleans up all resources associated with the
cluster.
:param clusterName: identifier of the cluster to terminate.
"""
raise NotImplementedError
| """
SSH into the leader instance of the specified cluster with the specified arguments to SSH.
:param clusterName: name of the cluster to target
:param args: list of string arguments to ssh.
:param strict: If False, strict host key checking is disabled. (Enabled by default.)
"""
raise NotImplementedError |
_discrete_tune.py | __all__ = ["DiscreteTune"]
from typing import Dict, Tuple, Optional
import WrightTools as wt
class DiscreteTune:
def __init__(
self, ranges: Dict[str, Tuple[float, float]], default: Optional[str] = None, **kwargs
):
"""A Tune which maps one set of inputs to associated output points.
Currently all tunes are assumed to have "nm" as their independent units.
Parameters
----------
ranges: dict[str, tuple[float, float]]
dictionary mapping the key (string identifier of a discrete position)
to a 2-tuple of (min, max) for the range for which that identifier should be used.
This dict is ordered, the first result with a matching range (inclusive of boundaries)
will be the one returned when called.
default: Optional[str]
The result to return if no matching range is represented.
Default is None
Note: kwargs are provided to make the serialized dictionary with ind_units
easy to initialize into a DiscreteTune object, but are currently ignored.
"""
self._ind_units = "nm"
self._ranges = {k: tuple(v) for k, v in ranges.items()}
self._default = default
def __repr__(self):
return f"DiscreteTune({repr(self.ranges)}, {repr(self.default)})"
def __call__(self, ind_value, *, ind_units=None, dep_units=None):
if ind_units is not None and self._ind_units is not None:
ind_value = wt.units.convert(ind_value, ind_units, self._ind_units)
for key, (min, max) in self.ranges.items():
if min <= ind_value <= max:
return key
return self.default
def __eq__(self, other):
return self.ranges == other.ranges and self.default == other.default
def as_dict(self):
"""Serialize this Tune as a python dictionary."""
out = {}
out["ranges"] = self.ranges
out["ind_units"] = self.ind_units
out["default"] = self.default
return out
@property
def | (self):
"""The ranges for discrete setpoints."""
return self._ranges
@property
def ind_units(self):
"""The units of the independent (input) values."""
return self._ind_units
@property
def default(self):
"""The value returned if no supplied range applies."""
return self._default
| ranges |
day_18.rs | extern crate advent_of_code_2017;
use advent_of_code_2017::*;
use std::str::FromStr;
use std::collections::HashMap;
use std::sync::mpsc::*;
fn main() {
let args = AdventArgs::init();
let instructions: Vec<Instruction> = args.input.iter()
.map(|line| line.parse().unwrap())
.collect();
let (sender0, receiver0) = channel();
let (sender1, receiver1) = channel();
let mut program0 = Program::new(0, instructions.clone(), sender0, receiver1, args.part == 1);
if args.part == 1 {
program0.run();
let mut answer = 0;
while let Ok(x) = receiver0.try_recv() {
answer = x;
}
println!("Last sent value: {}", answer);
} else {
let mut program1 = Program::new(1, instructions.clone(), sender1, receiver0, args.part == 1);
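        // Alternate between the two programs until both have terminated, or until
        // neither can make progress (both blocked waiting on an empty channel).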
while !(program0.terminated && program1.terminated) && (program0.run() || program1.run()) {
}
println!("Program 0 sent {} messages", program0.sent_count);
println!("Program 1 sent {} messages", program1.sent_count);
}
}
struct Program {
instructions: Vec<Instruction>,
registers: HashMap<char, i64>,
pc: i64,
terminated: bool,
sender: Sender<i64>,
sent_count: u64,
receiver: Receiver<i64>,
part1: bool
}
impl Program {
fn new(process_id: i64, instructions: Vec<Instruction>, sender: Sender<i64>, receiver: Receiver<i64>, part1: bool) -> Program {
let mut reg = HashMap::new();
if !part1 {
reg.insert('p', process_id);
}
Program {
instructions: instructions,
registers: reg,
pc: 0,
terminated: false,
sender: sender,
sent_count: 0,
receiver: receiver,
part1: part1
}
}
fn run(&mut self) -> bool {
use Instruction::*;
let mut blocked = false;
let mut did_something = false;
while !blocked && !self.terminated {
if self.pc < 0 || self.pc as usize >= self.instructions.len() {
self.terminated = true;
}
else {
let ins = self.instructions[self.pc as usize].clone();
match ins {
Snd(x) => {
self.sent_count += 1;
self.sender.send(self.get(x)).ok();
},
Set(x, y) => {
let y_val = self.get(y);
self.set(x, y_val);
},
Add(x, y) => {
let x_val = self.get(x);
let y_val = self.get(y);
self.set(x, x_val + y_val);
},
Mul(x, y) => {
let x_val = self.get(x);
let y_val = self.get(y);
self.set(x, x_val * y_val);
},
Mod(x, y) => {
let x_val = self.get(x);
let y_val = self.get(y);
self.set(x, x_val % y_val);
},
Rcv(x) => {
if self.part1 {
blocked = self.get(x) != 0;
} else {
match self.receiver.try_recv() {
Ok(y) => {
self.set(x, y);
},
Err(_) => {
blocked = true;
return did_something;
}
}
}
},
Jgz(x, y) => {
if self.get(x) > 0 {
self.pc = self.pc + self.get(y) - 1;
}
},
}
self.pc += 1;
did_something = true;
}
}
true
}
fn get(&self, register: Data) -> i64 {
use Data::*;
match register {
Register(c) => self.registers.get(&c).cloned().unwrap_or(0),
Literal(i) => i
}
}
fn set(&mut self, register: Data, value: i64) |
}
#[derive(Debug, Clone)]
enum Instruction {
Snd(Data),
Set(Data, Data),
Add(Data, Data),
Mul(Data, Data),
Mod(Data, Data),
Rcv(Data),
Jgz(Data, Data)
}
impl FromStr for Instruction {
type Err = String;
fn from_str(s: &str) -> Result<Self, Self::Err> {
use Instruction::*;
let mut str_iter = s.split_whitespace();
let ins = str_iter.next();
let x = str_iter.next().map(|x| x.parse::<Data>());
let y = str_iter.next().map(|x| x.parse::<Data>());
match (ins, x, y) {
(Some("snd"), Some(Ok(x)), _) => Ok(Snd(x)),
(Some("set"), Some(Ok(x)), Some(Ok(y))) => Ok(Set(x, y)),
(Some("add"), Some(Ok(x)), Some(Ok(y))) => Ok(Add(x, y)),
(Some("mul"), Some(Ok(x)), Some(Ok(y))) => Ok(Mul(x, y)),
(Some("mod"), Some(Ok(x)), Some(Ok(y))) => Ok(Mod(x, y)),
(Some("rcv"), Some(Ok(x)), _) => Ok(Rcv(x)),
(Some("jgz"), Some(Ok(x)), Some(Ok(y))) => Ok(Jgz(x, y)),
(_, _, _) => Err(format!("Unknown instruction {}", s))
}
}
}
#[derive(Debug, Clone, Copy)]
enum Data {
Literal(i64),
Register(char)
}
impl FromStr for Data {
type Err = String;
fn from_str(s: &str) -> Result<Self, Self::Err> {
use Data::*;
match (s.parse(), s.chars().next()) {
(Ok(num), _) => Ok(Literal(num)),
(Err(_), Some(c)) => Ok(Register(c)),
(_, _) => Err(format!("Invalid data {}", s))
}
}
}
| {
use Data::*;
match register {
Register(c) => {
self.registers.insert(c, value);
},
_ => {}
}
} |
UnknownInBlockTest.py | #Tests proper handling of Verifications with Transactions which don't exist.
from typing import Dict, List, Any
import json
from pytest import raises
from e2e.Libs.Minisketch import Sketch
from e2e.Classes.Merit.Block import Block
from e2e.Classes.Merit.Merit import Merit
from e2e.Classes.Consensus.VerificationPacket import VerificationPacket
from e2e.Meros.RPC import RPC
from e2e.Meros.Meros import MessageType
from e2e.Meros.Liver import Liver
from e2e.Tests.Errors import TestError, SuccessError
#pylint: disable=too-many-statements
def VUnknownInBlockTest(
rpc: RPC
) -> None:
vectors: Dict[str, Any]
with open("e2e/Vectors/Consensus/Verification/Parsable.json", "r") as file:
vectors = json.loads(file.read())
merit: Merit = Merit.fromJSON(vectors["blockchain"])
#Custom function to send the last Block and verify it errors at the right place.
def checkFail() -> None:
#This Block should cause the node to disconnect us AFTER it attempts to sync our Transaction.
|
with raises(SuccessError):
Liver(rpc, vectors["blockchain"], callbacks={1: checkFail}).live()
| syncedTX: bool = False
#Grab the Block.
block: Block = merit.blockchain.blocks[2]
#Send the Block.
rpc.meros.liveBlockHeader(block.header)
rpc.meros.handleBlockBody(block)
#Handle sync requests.
reqHash: bytes = bytes()
while True:
if syncedTX:
#Try receiving from the Live socket, where Meros sends keep-alives.
try:
if len(rpc.meros.live.recv()) != 0:
raise Exception()
except TestError:
raise SuccessError("Node disconnected us after we sent a parsable, yet invalid, Verification.")
except Exception:
raise TestError("Meros sent a keep-alive.")
msg: bytes = rpc.meros.sync.recv()
if MessageType(msg[0]) == MessageType.SketchHashesRequest:
if not block.body.packets:
raise TestError("Meros asked for Sketch Hashes from a Block without any.")
reqHash = msg[1 : 33]
if reqHash != block.header.hash:
raise TestError("Meros asked for Sketch Hashes that didn't belong to the Block we just sent it.")
#Create the hashes.
hashes: List[int] = []
for packet in block.body.packets:
hashes.append(Sketch.hash(block.header.sketchSalt, packet))
#Send the Sketch Hashes.
rpc.meros.sketchHashes(hashes)
elif MessageType(msg[0]) == MessageType.SketchHashRequests:
if not block.body.packets:
raise TestError("Meros asked for Verification Packets from a Block without any.")
reqHash = msg[1 : 33]
if reqHash != block.header.hash:
raise TestError("Meros asked for Verification Packets that didn't belong to the Block we just sent it.")
#Create a lookup of hash to packets.
packets: Dict[int, VerificationPacket] = {}
for packet in block.body.packets:
packets[Sketch.hash(block.header.sketchSalt, packet)] = packet
#Look up each requested packet and respond accordingly.
for h in range(int.from_bytes(msg[33 : 37], byteorder="little")):
sketchHash: int = int.from_bytes(msg[37 + (h * 8) : 45 + (h * 8)], byteorder="little")
if sketchHash not in packets:
raise TestError("Meros asked for a non-existent Sketch Hash.")
rpc.meros.packet(packets[sketchHash])
elif MessageType(msg[0]) == MessageType.TransactionRequest:
rpc.meros.dataMissing()
syncedTX = True
else:
raise TestError("Unexpected message sent: " + msg.hex().upper()) |